1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
31 #include "hard-reg-set.h"
34 #include "basic-block.h"
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
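/* Illustrative sketch only, not part of the compiler: the overall shape of
   the "keep checking until the process stabilizes" iteration described
   above.  The two helpers named here are hypothetical stand-ins for the
   scanning done via find_reloads (in reload.c) and for the spilling code
   in this file.  */
#if 0
static void
reload_driver_sketch ()
{
  int something_changed = 1;

  while (something_changed)
    {
      something_changed = 0;

      /* Scan every insn and accumulate, per register class, how many
	 reload registers it would need.  */
      accumulate_reload_needs_sketch ();

      /* If the accumulated needs are not met, seize more hard regs as
	 reload regs and spill the pseudos living in them; spilling can
	 invalidate further insns, so another pass is required.  */
      if (spill_more_hard_regs_sketch ())
	something_changed = 1;
    }
}
#endif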
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
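/* Illustrative sketch only, not part of the compiler: how an inverse mapping
   like spill_reg_order stays consistent with spill_regs.  The helper name is
   hypothetical; n_spills is the count of spill regs used so far, as described
   above.  */
#if 0
static void
record_spill_reg_sketch (regno)
     int regno;
{
  spill_reg_order[regno] = n_spills;	/* position of REGNO in spill_regs */
  spill_regs[n_spills++] = regno;	/* forward mapping: order of use */
}
#endif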
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
206 static char spill_indirect_levels;
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
212 char indirect_symref_ok;
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
216 char double_reg_address_ok;
218 /* Record the stack slot for each spilled hard register. */
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
222 /* Width allocated so far for that stack slot. */
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
231 char *basic_block_needs[N_REG_CLASSES];
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
240 int caller_save_needed;
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
245 int reload_in_progress = 0;
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
258 struct obstack reload_obstack;
259 char *reload_firstobj;
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
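/* Illustrative sketch only, not part of the compiler: the mark-and-release
   pattern described above.  A zero-length allocation records a mark, and
   obstack_free with that mark releases everything allocated after it.  */
#if 0
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
  /* ... rtl is allocated on reload_obstack during register elimination ... */
  obstack_free (&reload_obstack, reload_firstobj);
#endif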
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
272 static struct elim_table
274 int from;  /* Register number to be eliminated. */
275 int to;  /* Register number used as replacement. */
276 int initial_offset;  /* Initial difference between values. */
277 int can_eliminate;  /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous;  /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset;  /* Current offset between the two regs. */
281 int max_offset;  /* Maximum offset between the two regs. */
282 int previous_offset;  /* Offset at end of previous insn. */
283 int ref_outside_mem;  /* "to" has been referenced outside a MEM. */
284 rtx from_rtx;  /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx;  /* REG rtx for the replacement. */
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
296 #ifdef ELIMINABLE_REGS
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
322 /* Number of labels in the current function. */
324 static int num_labels;
326 struct hard_reg_n_uses { int regno; int uses; };
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static int compare_spill_regs PROTO((short *, short *));
349 static void reload_as_needed PROTO((rtx, int));
350 static void forget_old_reloads_1 PROTO((rtx, rtx));
351 static int reload_reg_class_lower PROTO((short *, short *));
352 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
354 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
356 static int reload_reg_free_p PROTO((int, int, enum reload_type));
357 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
359 static int reloads_conflict PROTO((int, int));
360 static int allocate_reload_reg PROTO((int, rtx, int, int));
361 static void choose_reload_regs PROTO((rtx, rtx));
362 static void merge_assigned_reloads PROTO((rtx));
363 static void emit_reload_insns PROTO((rtx));
364 static void delete_output_reload PROTO((rtx, int, rtx));
365 static void inc_for_reload PROTO((rtx, rtx, int));
366 static int constraint_accepts_reg_p PROTO((char *, rtx));
367 static int count_occurrences PROTO((rtx, rtx));
369 /* Initialize the reload pass once per compilation. */
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
385 spill_indirect_levels = 0;
387 while (memory_address_p (QImode, tem))
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
398 /* See if reg+reg is a valid (and offsettable) address. */
400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
402 tem = gen_rtx (PLUS, Pmode,
403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
408 if (memory_address_p (QImode, tem))
410 double_reg_address_ok = 1;
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
420 /* Main entry point for the reload pass.
422 FIRST is the first insn of the function being compiled.
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
433 are spilled from them, and where the pseudo regs are reallocated to.
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
439 reload (first, global, dumpfile)
445 register int i, j, k;
447 register struct elim_table *ep;
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
456 /* Nonzero means we couldn't get enough spill regs. */
459 /* The basic block number currently being processed for INSN. */
462 /* Make sure even insns with volatile mem refs are recognizable. */
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero (spill_stack_slot, sizeof spill_stack_slot);
483 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
485 /* Initialize the save area information for caller-save, in case some
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
497 for (i = 0; i < scratch_list_length; i++)
499 mark_scratch_live (scratch_list[i]);
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero (reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero (reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero (reg_max_ref_width, max_regno * sizeof (int));
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
530 #ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
539 for (insn = first; insn; insn = NEXT_INSN (insn))
541 rtx set = single_set (insn);
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
547 #ifdef LEGITIMATE_PIC_OPERAND_P
548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
564 reg_equiv_memory_loc[i]
565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
594 /* Does this function require a frame pointer? */
596 frame_pointer_needed = (! flag_omit_frame_pointer
597 #ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
606 || FRAME_POINTER_REQUIRED);
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612 #ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
617 && (ep->from != HARD_FRAME_POINTER_REGNUM
618 || ! frame_pointer_needed));
621 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
622 = ! frame_pointer_needed;
625 /* Count the number of eliminable registers and build the FROM and TO
626 REG rtx's. Note that code in gen_rtx will cause, e.g.,
627 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
628 We depend on this. */
629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
631 num_eliminable += ep->can_eliminate;
632 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
633 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
636 num_labels = max_label_num () - get_first_label_num ();
638 /* Allocate the tables used to store offset information at labels. */
639 offsets_known_at = (char *) alloca (num_labels);
640 offsets_at
641 = (int (*)[NUM_ELIMINABLE_REGS])
642 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
644 offsets_known_at -= get_first_label_num ();
645 offsets_at -= get_first_label_num ();
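/* Illustrative sketch only, not part of the compiler: the pointer-biasing
   idiom used just above, shown with hypothetical names.  After subtracting
   the first label number, the table can be indexed directly by label number
   rather than by (label number - first label number).  */
#if 0
  {
    char storage[10];
    int first_num = 100;		/* labels numbered 100..109 */
    char *known = storage - first_num;	/* bias the base pointer, as above */

    known[105] = 1;			/* same element as storage[5] */
  }
#endif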
647 /* Alter each pseudo-reg rtx to contain its hard reg number.
648 Assign stack slots to the pseudos that lack hard regs or equivalents.
649 Do not touch virtual registers. */
651 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
654 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
655 because the stack size may be a part of the offset computation for
656 register elimination. */
657 assign_stack_local (BLKmode, 0, 0);
659 /* If we have some registers we think can be eliminated, scan all insns to
660 see if there is an insn that sets one of these registers to something
661 other than itself plus a constant. If so, the register cannot be
662 eliminated. Doing this scan here eliminates an extra pass through the
663 main reload loop in the most common case where register elimination
665 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
666 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
667 || GET_CODE (insn) == CALL_INSN)
668 note_stores (PATTERN (insn), mark_not_eliminable);
670 #ifndef REGISTER_CONSTRAINTS
671 /* If all the pseudo regs have hard regs,
672 except for those that are never referenced,
673 we know that no reloads are needed. */
674 /* But that is not true if there are register constraints, since
675 in that case some pseudos might be in the wrong kind of hard reg. */
677 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
678 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
681 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
685 /* Compute the order of preference for hard registers to spill.
686 Store them by decreasing preference in potential_reload_regs. */
688 order_regs_for_reload ();
690 /* So far, no hard regs have been spilled. */
692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
693 spill_reg_order[i] = -1;
695 /* On most machines, we can't use any register explicitly used in the
696 rtl as a spill register. But on some, we have to. Those will have
697 taken care to keep the life of hard regs as short as possible. */
699 #ifndef SMALL_REGISTER_CLASSES
700 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
703 /* Spill any hard regs that we know we can't eliminate. */
704 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
705 if (! ep->can_eliminate)
707 spill_hard_reg (ep->from, global, dumpfile, 1);
708 regs_ever_live[ep->from] = 1;
712 for (i = 0; i < N_REG_CLASSES; i++)
714 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
715 bzero (basic_block_needs[i], n_basic_blocks);
718 /* From now on, we need to emit any moves without making new pseudos. */
719 reload_in_progress = 1;
721 /* This loop scans the entire function each go-round
722 and repeats until one repetition spills no additional hard regs. */
724 /* This flag is set when a pseudo reg is spilled,
725 to require another pass. Note that getting an additional reload
726 reg does not necessarily imply any pseudo reg was spilled;
727 sometimes we find a reload reg that no pseudo reg was allocated in. */
728 something_changed = 1;
729 /* This flag is set if there are any insns that require reloading. */
730 something_needs_reloads = 0;
731 /* This flag is set if there are any insns that require register
732 eliminations. */
733 something_needs_elimination = 0;
734 while (something_changed)
738 /* For each class, number of reload regs needed in that class.
739 This is the maximum over all insns of the needs in that class
740 of the individual insn. */
741 int max_needs[N_REG_CLASSES];
742 /* For each class, size of group of consecutive regs
743 that is needed for the reloads of this class. */
744 int group_size[N_REG_CLASSES];
745 /* For each class, max number of consecutive groups needed.
746 (Each group contains group_size[CLASS] consecutive registers.) */
747 int max_groups[N_REG_CLASSES];
748 /* For each class, max number needed of regs that don't belong
749 to any of the groups. */
750 int max_nongroups[N_REG_CLASSES];
751 /* For each class, the machine mode which requires consecutive
752 groups of regs of that class.
753 If two different modes ever require groups of one class,
754 they must be the same size and equally restrictive for that class,
755 otherwise we can't handle the complexity. */
756 enum machine_mode group_mode[N_REG_CLASSES];
757 /* Record the insn where each maximum need is first found. */
758 rtx max_needs_insn[N_REG_CLASSES];
759 rtx max_groups_insn[N_REG_CLASSES];
760 rtx max_nongroups_insn[N_REG_CLASSES];
762 int starting_frame_size = get_frame_size ();
763 static char *reg_class_names[] = REG_CLASS_NAMES;
765 something_changed = 0;
766 bzero (max_needs, sizeof max_needs);
767 bzero (max_groups, sizeof max_groups);
768 bzero (max_nongroups, sizeof max_nongroups);
769 bzero (max_needs_insn, sizeof max_needs_insn);
770 bzero (max_groups_insn, sizeof max_groups_insn);
771 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
772 bzero (group_size, sizeof group_size);
773 for (i = 0; i < N_REG_CLASSES; i++)
774 group_mode[i] = VOIDmode;
776 /* Keep track of which basic blocks are needing the reloads. */
779 /* Remember whether any element of basic_block_needs
780 changes from 0 to 1 in this pass. */
781 new_basic_block_needs = 0;
783 /* Reset all offsets on eliminable registers to their initial values. */
784 #ifdef ELIMINABLE_REGS
785 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
787 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, ep
->initial_offset
);
788 ep
->previous_offset
= ep
->offset
789 = ep
->max_offset
= ep
->initial_offset
;
792 #ifdef INITIAL_FRAME_POINTER_OFFSET
793 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate
[0].initial_offset
);
795 if (!FRAME_POINTER_REQUIRED
)
797 reg_eliminate
[0].initial_offset
= 0;
799 reg_eliminate
[0].previous_offset
= reg_eliminate
[0].max_offset
800 = reg_eliminate
[0].offset
= reg_eliminate
[0].initial_offset
;
803 num_not_at_initial_offset
= 0;
805 bzero (&offsets_known_at
[get_first_label_num ()], num_labels
);
807 /* Set a known offset for each forced label to be at the initial offset
808 of each elimination. We do this because we assume that all
809 computed jumps occur from a location where each elimination is
810 at its initial offset. */
812 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
814 set_label_offsets (XEXP (x
, 0), NULL_RTX
, 1);
816 /* For each pseudo register that has an equivalent location defined,
817 try to eliminate any eliminable registers (such as the frame pointer)
818 assuming initial offsets for the replacement register, which
821 If the resulting location is directly addressable, substitute
822 the MEM we just got directly for the old REG.
824 If it is not addressable but is a constant or the sum of a hard reg
825 and constant, it is probably not addressable because the constant is
826 out of range, in that case record the address; we will generate
827 hairy code to compute the address in a register each time it is
828 needed. Similarly if it is a hard register, but one that is not
829 valid as an address register.
831 If the location is not addressable, but does not have one of the
832 above forms, assign a stack slot. We have to do this to avoid the
833 potential of producing lots of reloads if, e.g., a location involves
834 a pseudo that didn't get a hard register and has an equivalent memory
835 location that also involves a pseudo that didn't get a hard register.
837 Perhaps at some point we will improve reload_when_needed handling
838 so this problem goes away. But that's very hairy. */
840 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
841 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc
[i
])
843 rtx x
= eliminate_regs (reg_equiv_memory_loc
[i
], 0, NULL_RTX
);
845 if (strict_memory_address_p (GET_MODE (regno_reg_rtx
[i
]),
847 reg_equiv_mem
[i
] = x
, reg_equiv_address
[i
] = 0;
848 else if (CONSTANT_P (XEXP (x
, 0))
849 || (GET_CODE (XEXP (x
, 0)) == REG
850 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
851 || (GET_CODE (XEXP (x
, 0)) == PLUS
852 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
853 && (REGNO (XEXP (XEXP (x
, 0), 0))
854 < FIRST_PSEUDO_REGISTER
)
855 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
856 reg_equiv_address
[i
] = XEXP (x
, 0), reg_equiv_mem
[i
] = 0;
859 /* Make a new stack slot. Then indicate that something
860 changed so we go back and recompute offsets for
861 eliminable registers because the allocation of memory
862 below might change some offset. reg_equiv_{mem,address}
863 will be set up for this pseudo on the next pass around
865 reg_equiv_memory_loc
[i
] = 0;
866 reg_equiv_init
[i
] = 0;
868 something_changed
= 1;
872 /* If we allocated another pseudo to the stack, redo elimination
874 if (something_changed
)
877 /* If caller-saves needs a group, initialize the group to include
878 the size and mode required for caller-saves. */
880 if (caller_save_group_size
> 1)
882 group_mode
[(int) caller_save_spill_class
] = Pmode
;
883 group_size
[(int) caller_save_spill_class
] = caller_save_group_size
;
886 /* Compute the most additional registers needed by any instruction.
887 Collect information separately for each class of regs. */
889 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
891 if (global
&& this_block
+ 1 < n_basic_blocks
892 && insn
== basic_block_head
[this_block
+1])
895 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
896 might include REG_LABEL), we need to see what effects this
897 has on the known offsets at labels. */
899 if (GET_CODE (insn
) == CODE_LABEL
|| GET_CODE (insn
) == JUMP_INSN
900 || (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
901 && REG_NOTES (insn
) != 0))
902 set_label_offsets (insn
, insn
, 0);
904 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
906 /* Nonzero means don't use a reload reg that overlaps
907 the place where a function value can be returned. */
908 rtx avoid_return_reg
= 0;
910 rtx old_body
= PATTERN (insn
);
911 int old_code
= INSN_CODE (insn
);
912 rtx old_notes
= REG_NOTES (insn
);
913 int did_elimination
= 0;
915 /* To compute the number of reload registers of each class
916 needed for an insn, we must simulate what choose_reload_regs
917 can do. We do this by splitting an insn into an "input" and
918 an "output" part. RELOAD_OTHER reloads are used in both.
919 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
920 which must be live over the entire input section of reloads,
921 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
922 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
925 The registers needed for output are RELOAD_OTHER and
926 RELOAD_FOR_OUTPUT, which are live for the entire output
927 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
928 reloads for each operand.
930 The total number of registers needed is the maximum of the
931 inputs and outputs. */
935 /* [0] is normal, [1] is nongroup. */
936 int regs
[2][N_REG_CLASSES
];
937 int groups
[N_REG_CLASSES
];
940 /* Each `struct needs' corresponds to one RELOAD_... type. */
946 struct needs other_addr
;
947 struct needs op_addr
;
948 struct needs in_addr
[MAX_RECOG_OPERANDS
];
949 struct needs out_addr
[MAX_RECOG_OPERANDS
];
952 /* If needed, eliminate any eliminable registers. */
954 did_elimination
= eliminate_regs_in_insn (insn
, 0);
956 #ifdef SMALL_REGISTER_CLASSES
957 /* Set avoid_return_reg if this is an insn
958 that might use the value of a function call. */
959 if (GET_CODE (insn
) == CALL_INSN
)
961 if (GET_CODE (PATTERN (insn
)) == SET
)
962 after_call
= SET_DEST (PATTERN (insn
));
963 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
964 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
965 after_call
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
969 else if (after_call
!= 0
970 && !(GET_CODE (PATTERN (insn
)) == SET
971 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
))
973 if (reg_referenced_p (after_call
, PATTERN (insn
)))
974 avoid_return_reg
= after_call
;
977 #endif /* SMALL_REGISTER_CLASSES */
979 /* Analyze the instruction. */
980 find_reloads (insn
, 0, spill_indirect_levels
, global
,
983 /* Remember for later shortcuts which insns had any reloads or
984 register eliminations.
986 One might think that it would be worthwhile to mark insns
987 that need register replacements but not reloads, but this is
988 not safe because find_reloads may do some manipulation of
989 the insn (such as swapping commutative operands), which would
990 be lost when we restore the old pattern after register
991 replacement. So the actions of find_reloads must be redone in
992 subsequent passes or in reload_as_needed.
994 However, it is safe to mark insns that need reloads
995 but not register replacement. */
997 PUT_MODE (insn
, (did_elimination
? QImode
999 : GET_MODE (insn
) == DImode
? DImode
1002 /* Discard any register replacements done. */
1003 if (did_elimination
)
1005 obstack_free (&reload_obstack
, reload_firstobj
);
1006 PATTERN (insn
) = old_body
;
1007 INSN_CODE (insn
) = old_code
;
1008 REG_NOTES (insn
) = old_notes
;
1009 something_needs_elimination
= 1;
1012 /* If this insn has no reloads, we need not do anything except
1013 in the case of a CALL_INSN when we have caller-saves and
1014 caller-save needs reloads. */
1017 && ! (GET_CODE (insn
) == CALL_INSN
1018 && caller_save_spill_class
!= NO_REGS
))
1021 something_needs_reloads
= 1;
1022 bzero (&insn_needs
, sizeof insn_needs
);
1024 /* Count each reload once in every class
1025 containing the reload's own class. */
1027 for (i
= 0; i
< n_reloads
; i
++)
1029 register enum reg_class
*p
;
1030 enum reg_class
class = reload_reg_class
[i
];
1032 enum machine_mode mode
;
1034 struct needs
*this_needs
;
1036 /* Don't count the dummy reloads, for which one of the
1037 regs mentioned in the insn can be used for reloading.
1038 Don't count optional reloads.
1039 Don't count reloads that got combined with others. */
1040 if (reload_reg_rtx
[i
] != 0
1041 || reload_optional
[i
] != 0
1042 || (reload_out
[i
] == 0 && reload_in
[i
] == 0
1043 && ! reload_secondary_p
[i
]))
1046 /* Show that a reload register of this class is needed
1047 in this basic block. We do not use insn_needs and
1048 insn_groups because they are overly conservative for
1050 if (global
&& ! basic_block_needs
[(int) class][this_block
])
1052 basic_block_needs
[(int) class][this_block
] = 1;
1053 new_basic_block_needs
= 1;
1057 mode
= reload_inmode
[i
];
1058 if (GET_MODE_SIZE (reload_outmode
[i
]) > GET_MODE_SIZE (mode
))
1059 mode
= reload_outmode
[i
];
1060 size
= CLASS_MAX_NREGS (class, mode
);
1062 /* If this class doesn't want a group, determine if we have
1063 a nongroup need or a regular need. We have a nongroup
1064 need if this reload conflicts with a group reload whose
1065 class intersects with this reload's class. */
1069 for (j
= 0; j
< n_reloads
; j
++)
1070 if ((CLASS_MAX_NREGS (reload_reg_class
[j
],
1071 (GET_MODE_SIZE (reload_outmode
[j
])
1072 > GET_MODE_SIZE (reload_inmode
[j
]))
1076 && reloads_conflict (i
, j
)
1077 && reg_classes_intersect_p (class,
1078 reload_reg_class
[j
]))
1084 /* Decide which time-of-use to count this reload for. */
1085 switch (reload_when_needed
[i
])
1088 this_needs
= &insn_needs
.other
;
1090 case RELOAD_FOR_INPUT
:
1091 this_needs
= &insn_needs
.input
;
1093 case RELOAD_FOR_OUTPUT
:
1094 this_needs
= &insn_needs
.output
;
1096 case RELOAD_FOR_INSN
:
1097 this_needs
= &insn_needs
.insn
;
1099 case RELOAD_FOR_OTHER_ADDRESS
:
1100 this_needs
= &insn_needs
.other_addr
;
1102 case RELOAD_FOR_INPUT_ADDRESS
:
1103 this_needs
= &insn_needs
.in_addr
[reload_opnum
[i
]];
1105 case RELOAD_FOR_OUTPUT_ADDRESS
:
1106 this_needs
= &insn_needs
.out_addr
[reload_opnum
[i
]];
1108 case RELOAD_FOR_OPERAND_ADDRESS
:
1109 this_needs
= &insn_needs
.op_addr
;
1115 enum machine_mode other_mode
, allocate_mode
;
1117 /* Count number of groups needed separately from
1118 number of individual regs needed. */
1119 this_needs
->groups
[(int) class]++;
1120 p
= reg_class_superclasses
[(int) class];
1121 while (*p
!= LIM_REG_CLASSES
)
1122 this_needs
->groups
[(int) *p
++]++;
1124 /* Record size and mode of a group of this class. */
1125 /* If more than one size group is needed,
1126 make all groups the largest needed size. */
1127 if (group_size
[(int) class] < size
)
1129 other_mode
= group_mode
[(int) class];
1130 allocate_mode
= mode
;
1132 group_size
[(int) class] = size
;
1133 group_mode
[(int) class] = mode
;
1138 allocate_mode
= group_mode
[(int) class];
1141 /* Crash if two dissimilar machine modes both need
1142 groups of consecutive regs of the same class. */
1144 if (other_mode
!= VOIDmode
&& other_mode
!= allocate_mode
1145 && ! modes_equiv_for_class_p (allocate_mode
,
1151 this_needs
->regs
[nongroup_need
][(int) class] += 1;
1152 p
= reg_class_superclasses
[(int) class];
1153 while (*p
!= LIM_REG_CLASSES
)
1154 this_needs
->regs
[nongroup_need
][(int) *p
++] += 1;
1160 /* All reloads have been counted for this insn;
1161 now merge the various times of use.
1162 This sets insn_needs, etc., to the maximum total number
1163 of registers needed at any point in this insn. */
1165 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1167 int in_max
, out_max
;
1169 /* Compute normal and nongroup needs. */
1170 for (j
= 0; j
<= 1; j
++)
1172 for (in_max
= 0, out_max
= 0, k
= 0;
1173 k
< reload_n_operands
; k
++)
1176 = MAX (in_max
, insn_needs
.in_addr
[k
].regs
[j
][i
]);
1178 = MAX (out_max
, insn_needs
.out_addr
[k
].regs
[j
][i
]);
1181 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1182 and operand addresses but not things used to reload
1183 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1184 don't conflict with things needed to reload inputs or
1187 in_max
= MAX (in_max
, insn_needs
.op_addr
.regs
[j
][i
]);
1188 out_max
= MAX (out_max
, insn_needs
.insn
.regs
[j
][i
]);
1190 insn_needs
.input
.regs
[j
][i
]
1191 = MAX (insn_needs
.input
.regs
[j
][i
]
1192 + insn_needs
.op_addr
.regs
[j
][i
]
1193 + insn_needs
.insn
.regs
[j
][i
],
1194 in_max
+ insn_needs
.input
.regs
[j
][i
]);
1196 insn_needs
.output
.regs
[j
][i
] += out_max
;
1197 insn_needs
.other
.regs
[j
][i
]
1198 += MAX (MAX (insn_needs
.input
.regs
[j
][i
],
1199 insn_needs
.output
.regs
[j
][i
]),
1200 insn_needs
.other_addr
.regs
[j
][i
]);
1204 /* Now compute group needs. */
1205 for (in_max
= 0, out_max
= 0, j
= 0;
1206 j
< reload_n_operands
; j
++)
1208 in_max
= MAX (in_max
, insn_needs
.in_addr
[j
].groups
[i
]);
1210 = MAX (out_max
, insn_needs
.out_addr
[j
].groups
[i
]);
1213 in_max
= MAX (in_max
, insn_needs
.op_addr
.groups
[i
]);
1214 out_max
= MAX (out_max
, insn_needs
.insn
.groups
[i
]);
1216 insn_needs
.input
.groups
[i
]
1217 = MAX (insn_needs
.input
.groups
[i
]
1218 + insn_needs
.op_addr
.groups
[i
]
1219 + insn_needs
.insn
.groups
[i
],
1220 in_max
+ insn_needs
.input
.groups
[i
]);
1222 insn_needs
.output
.groups
[i
] += out_max
;
1223 insn_needs
.other
.groups
[i
]
1224 += MAX (MAX (insn_needs
.input
.groups
[i
],
1225 insn_needs
.output
.groups
[i
]),
1226 insn_needs
.other_addr
.groups
[i
]);
1229 /* If this is a CALL_INSN and caller-saves will need
1230 a spill register, act as if the spill register is
1231 needed for this insn. However, the spill register
1232 can be used by any reload of this insn, so we only
1233 need do something if no need for that class has
1236 The assumption that every CALL_INSN will trigger a
1237 caller-save is highly conservative, however, the number
1238 of cases where caller-saves will need a spill register but
1239 a block containing a CALL_INSN won't need a spill register
1240 of that class should be quite rare.
1242 If a group is needed, the size and mode of the group will
1243 have been set up at the beginning of this loop. */
1245 if (GET_CODE (insn
) == CALL_INSN
1246 && caller_save_spill_class
!= NO_REGS
)
1248 /* See if this register would conflict with any reload
1249 that needs a group. */
1250 int nongroup_need
= 0;
1251 int *caller_save_needs
;
1253 for (j
= 0; j
< n_reloads
; j
++)
1254 if ((CLASS_MAX_NREGS (reload_reg_class
[j
],
1255 (GET_MODE_SIZE (reload_outmode
[j
])
1256 > GET_MODE_SIZE (reload_inmode
[j
]))
1260 && reg_classes_intersect_p (caller_save_spill_class
,
1261 reload_reg_class
[j
]))
1268 = (caller_save_group_size
> 1
1269 ? insn_needs
.other
.groups
1270 : insn_needs
.other
.regs
[nongroup_need
]);
1272 if (caller_save_needs
[(int) caller_save_spill_class
] == 0)
1274 register enum reg_class
*p
1275 = reg_class_superclasses
[(int) caller_save_spill_class
];
1277 caller_save_needs
[(int) caller_save_spill_class
]++;
1279 while (*p
!= LIM_REG_CLASSES
)
1280 caller_save_needs
[(int) *p
++] += 1;
1283 /* Show that this basic block will need a register of
1287 && ! (basic_block_needs
[(int) caller_save_spill_class
]
1290 basic_block_needs
[(int) caller_save_spill_class
]
1292 new_basic_block_needs
= 1;
1296 #ifdef SMALL_REGISTER_CLASSES
1297 /* If this insn stores the value of a function call,
1298 and that value is in a register that has been spilled,
1299 and if the insn needs a reload in a class
1300 that might use that register as the reload register,
1301 then add an extra need in that class.
1302 This makes sure we have a register available that does
1303 not overlap the return value. */
1305 if (avoid_return_reg
)
1307 int regno
= REGNO (avoid_return_reg
);
1309 = HARD_REGNO_NREGS (regno
, GET_MODE (avoid_return_reg
));
1311 int basic_needs
[N_REG_CLASSES
], basic_groups
[N_REG_CLASSES
];
1313 /* First compute the "basic needs", which counts a
1314 need only in the smallest class in which it
1317 bcopy (insn_needs
.other
.regs
[0], basic_needs
,
1318 sizeof basic_needs
);
1319 bcopy (insn_needs
.other
.groups
, basic_groups
,
1320 sizeof basic_groups
);
1322 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1326 if (basic_needs
[i
] >= 0)
1327 for (p
= reg_class_superclasses
[i
];
1328 *p
!= LIM_REG_CLASSES
; p
++)
1329 basic_needs
[(int) *p
] -= basic_needs
[i
];
1331 if (basic_groups
[i
] >= 0)
1332 for (p
= reg_class_superclasses
[i
];
1333 *p
!= LIM_REG_CLASSES
; p
++)
1334 basic_groups
[(int) *p
] -= basic_groups
[i
];
1337 /* Now count extra regs if there might be a conflict with
1338 the return value register.
1340 ??? This is not quite correct because we don't properly
1341 handle the case of groups, but if we end up doing
1342 something wrong, it either will end up not mattering or
1343 we will abort elsewhere. */
1345 for (r
= regno
; r
< regno
+ nregs
; r
++)
1346 if (spill_reg_order
[r
] >= 0)
1347 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1348 if (TEST_HARD_REG_BIT (reg_class_contents
[i
], r
))
1350 if (basic_needs
[i
] > 0 || basic_groups
[i
] > 0)
1354 insn_needs
.other
.regs
[0][i
]++;
1355 p
= reg_class_superclasses
[i
];
1356 while (*p
!= LIM_REG_CLASSES
)
1357 insn_needs
.other
.regs
[0][(int) *p
++]++;
1361 #endif /* SMALL_REGISTER_CLASSES */
1363 /* For each class, collect maximum need of any insn. */
1365 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1367 if (max_needs
[i
] < insn_needs
.other
.regs
[0][i
])
1369 max_needs
[i
] = insn_needs
.other
.regs
[0][i
];
1370 max_needs_insn
[i
] = insn
;
1372 if (max_groups
[i
] < insn_needs
.other
.groups
[i
])
1374 max_groups
[i
] = insn_needs
.other
.groups
[i
];
1375 max_groups_insn
[i
] = insn
;
1377 if (max_nongroups
[i
] < insn_needs
.other
.regs
[1][i
])
1379 max_nongroups
[i
] = insn_needs
.other
.regs
[1][i
];
1380 max_nongroups_insn
[i
] = insn
;
1384 /* Note that there is a continue statement above. */
1387 /* If we allocated any new memory locations, make another pass
1388 since it might have changed elimination offsets. */
1389 if (starting_frame_size
!= get_frame_size ())
1390 something_changed
= 1;
1393 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1395 if (max_needs
[i
] > 0)
1397 ";; Need %d reg%s of class %s (for insn %d).\n",
1398 max_needs
[i
], max_needs
[i
] == 1 ? "" : "s",
1399 reg_class_names
[i
], INSN_UID (max_needs_insn
[i
]));
1400 if (max_nongroups
[i
] > 0)
1402 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1403 max_nongroups
[i
], max_nongroups
[i
] == 1 ? "" : "s",
1404 reg_class_names
[i
], INSN_UID (max_nongroups_insn
[i
]));
1405 if (max_groups
[i
] > 0)
1407 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1408 max_groups
[i
], max_groups
[i
] == 1 ? "" : "s",
1409 mode_name
[(int) group_mode
[i
]],
1410 reg_class_names
[i
], INSN_UID (max_groups_insn
[i
]));
1413 /* If we have caller-saves, set up the save areas and see if caller-save
1414 will need a spill register. */
1416 if (caller_save_needed
1417 && ! setup_save_areas (&something_changed
)
1418 && caller_save_spill_class
== NO_REGS
)
1420 /* The class we will need depends on whether the machine
1421 supports the sum of two registers for an address; see
1422 find_address_reloads for details. */
1424 caller_save_spill_class
1425 = double_reg_address_ok
? INDEX_REG_CLASS
: BASE_REG_CLASS
;
1426 caller_save_group_size
1427 = CLASS_MAX_NREGS (caller_save_spill_class
, Pmode
);
1428 something_changed
= 1;
1431 /* See if anything that happened changes which eliminations are valid.
1432 For example, on the Sparc, whether or not the frame pointer can
1433 be eliminated can depend on what registers have been used. We need
1434 not check some conditions again (such as flag_omit_frame_pointer)
1435 since they can't have changed. */
1437 for (ep
= reg_eliminate
; ep
< &reg_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1438 if ((ep
->from
== HARD_FRAME_POINTER_REGNUM
&& FRAME_POINTER_REQUIRED
)
1439 #ifdef ELIMINABLE_REGS
1440 || ! CAN_ELIMINATE (ep
->from
, ep
->to
)
1443 ep
->can_eliminate
= 0;
1445 /* Look for the case where we have discovered that we can't replace
1446 register A with register B and that means that we will now be
1447 trying to replace register A with register C. This means we can
1448 no longer replace register C with register B and we need to disable
1449 such an elimination, if it exists. This occurs often with A == ap,
1450 B == sp, and C == fp. */
1452 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1454 struct elim_table
*op
;
1455 register int new_to
= -1;
1457 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
1459 /* Find the current elimination for ep->from, if there is a
1461 for (op
= reg_eliminate
;
1462 op
< &reg_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
1463 if (op
->from
== ep
->from
&& op
->can_eliminate
)
1469 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1471 for (op
= reg_eliminate
;
1472 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
1473 if (op
->from
== new_to
&& op
->to
== ep
->to
)
1474 op
->can_eliminate
= 0;
1478 /* See if any registers that we thought we could eliminate the previous
1479 time are no longer eliminable. If so, something has changed and we
1480 must spill the register. Also, recompute the number of eliminable
1481 registers and see if the frame pointer is needed; it is if there is
1482 no elimination of the frame pointer that we can perform. */
1484 frame_pointer_needed
= 1;
1485 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1487 if (ep
->can_eliminate
&& ep
->from
== FRAME_POINTER_REGNUM
1488 && ep
->to
!= HARD_FRAME_POINTER_REGNUM
)
1489 frame_pointer_needed
= 0;
1491 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
1493 ep
->can_eliminate_previous
= 0;
1494 spill_hard_reg (ep
->from
, global
, dumpfile
, 1);
1495 regs_ever_live
[ep
->from
] = 1;
1496 something_changed
= 1;
1501 /* If all needs are met, we win. */
1503 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1504 if (max_needs
[i
] > 0 || max_groups
[i
] > 0 || max_nongroups
[i
] > 0)
1506 if (i
== N_REG_CLASSES
&& !new_basic_block_needs
&& ! something_changed
)
1509 /* Not all needs are met; must spill some hard regs. */
1511 /* Put all registers spilled so far back in potential_reload_regs, but
1512 put them at the front, since we've already spilled most of the
1513 pseudos in them (we might have left some pseudos unspilled if they
1514 were in a block that didn't need any spill registers of a conflicting
1515 class. We used to try to mark off the need for those registers,
1516 but doing so properly is very complex and reallocating them is the
1517 simpler approach. First, "pack" potential_reload_regs by pushing
1518 any nonnegative entries towards the end. That will leave room
1519 for the registers we already spilled.
1521 Also, undo the marking of the spill registers from the last time
1522 around in FORBIDDEN_REGS since we will probably be allocating
1525 ??? It is theoretically possible that we might end up not using one
1526 of our previously-spilled registers in this allocation, even though
1527 they are at the head of the list. It's not clear what to do about
1528 this, but it was no better before, when we marked off the needs met
1529 by the previously-spilled registers. With the current code, globals
1530 can be allocated into these registers, but locals cannot. */
1534 for (i
= j
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1535 if (potential_reload_regs
[i
] != -1)
1536 potential_reload_regs
[j
--] = potential_reload_regs
[i
];
1538 for (i
= 0; i
< n_spills
; i
++)
1540 potential_reload_regs
[i
] = spill_regs
[i
];
1541 spill_reg_order
[spill_regs
[i
]] = -1;
1542 CLEAR_HARD_REG_BIT (forbidden_regs
, spill_regs
[i
]);
1548 /* Now find more reload regs to satisfy the remaining need
1549 Do it by ascending class number, since otherwise a reg
1550 might be spilled for a big class and might fail to count
1551 for a smaller class even though it belongs to that class.
1553 Count spilled regs in `spills', and add entries to
1554 `spill_regs' and `spill_reg_order'.
1556 ??? Note there is a problem here.
1557 When there is a need for a group in a high-numbered class,
1558 and also need for non-group regs that come from a lower class,
1559 the non-group regs are chosen first. If there aren't many regs,
1560 they might leave no room for a group.
1562 This was happening on the 386. To fix it, we added the code
1563 that calls possible_group_p, so that the lower class won't
1564 break up the last possible group.
1566 Really fixing the problem would require changes above
1567 in counting the regs already spilled, and in choose_reload_regs.
1568 It might be hard to avoid introducing bugs there. */
1570 CLEAR_HARD_REG_SET (counted_for_groups
);
1571 CLEAR_HARD_REG_SET (counted_for_nongroups
);
1573 for (class = 0; class < N_REG_CLASSES
; class++)
1575 /* First get the groups of registers.
1576 If we got single registers first, we might fragment
1578 while (max_groups
[class] > 0)
1580 /* If any single spilled regs happen to form groups,
1581 count them now. Maybe we don't really need
1582 to spill another group. */
1583 count_possible_groups (group_size
, group_mode
, max_groups
);
1585 if (max_groups
[class] <= 0)
1588 /* Groups of size 2 (the only groups used on most machines)
1589 are treated specially. */
1590 if (group_size
[class] == 2)
1592 /* First, look for a register that will complete a group. */
1593 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1597 j
= potential_reload_regs
[i
];
1598 if (j
>= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
)
1600 ((j
> 0 && (other
= j
- 1, spill_reg_order
[other
] >= 0)
1601 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1602 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1603 && HARD_REGNO_MODE_OK (other
, group_mode
[class])
1604 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1606 /* We don't want one part of another group.
1607 We could get "two groups" that overlap! */
1608 && ! TEST_HARD_REG_BIT (counted_for_groups
, other
))
1610 (j
< FIRST_PSEUDO_REGISTER
- 1
1611 && (other
= j
+ 1, spill_reg_order
[other
] >= 0)
1612 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1613 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1614 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1615 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1617 && ! TEST_HARD_REG_BIT (counted_for_groups
,
1620 register enum reg_class
*p
;
1622 /* We have found one that will complete a group,
1623 so count off one group as provided. */
1624 max_groups
[class]--;
1625 p
= reg_class_superclasses
[class];
1626 while (*p
!= LIM_REG_CLASSES
)
1627 max_groups
[(int) *p
++]--;
1629 /* Indicate both these regs are part of a group. */
1630 SET_HARD_REG_BIT (counted_for_groups
, j
);
1631 SET_HARD_REG_BIT (counted_for_groups
, other
);
1635 /* We can't complete a group, so start one. */
1636 #ifdef SMALL_REGISTER_CLASSES
1637 /* Look for a pair neither of which is explicitly used. */
1638 if (i
== FIRST_PSEUDO_REGISTER
)
1639 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1642 j
= potential_reload_regs
[i
];
1643 /* Verify that J+1 is a potential reload reg. */
1644 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1645 if (potential_reload_regs
[k
] == j
+ 1)
1647 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1648 && k
< FIRST_PSEUDO_REGISTER
1649 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1650 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1651 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1652 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1653 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1655 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1)
1656 /* Reject J at this stage
1657 if J+1 was explicitly used. */
1658 && ! regs_explicitly_used
[j
+ 1])
1662 /* Now try any group at all
1663 whose registers are not in bad_spill_regs. */
1664 if (i
== FIRST_PSEUDO_REGISTER
)
1665 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1668 j
= potential_reload_regs
[i
];
1669 /* Verify that J+1 is a potential reload reg. */
1670 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1671 if (potential_reload_regs
[k
] == j
+ 1)
1673 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1674 && k
< FIRST_PSEUDO_REGISTER
1675 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1676 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1677 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1678 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1679 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1681 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1))
1685 /* I should be the index in potential_reload_regs
1686 of the new reload reg we have found. */
1688 if (i
>= FIRST_PSEUDO_REGISTER
)
1690 /* There are no groups left to spill. */
1691 spill_failure (max_groups_insn
[class]);
1697 |= new_spill_reg (i
, class, max_needs
, NULL_PTR
,
1702 /* For groups of more than 2 registers,
1703 look for a sufficient sequence of unspilled registers,
1704 and spill them all at once. */
1705 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1709 j
= potential_reload_regs
[i
];
1711 && j
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
1712 && HARD_REGNO_MODE_OK (j
, group_mode
[class]))
1714 /* Check each reg in the sequence. */
1715 for (k
= 0; k
< group_size
[class]; k
++)
1716 if (! (spill_reg_order
[j
+ k
] < 0
1717 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ k
)
1718 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ k
)))
1720 /* We got a full sequence, so spill them all. */
1721 if (k
== group_size
[class])
1723 register enum reg_class
*p
;
1724 for (k
= 0; k
< group_size
[class]; k
++)
1727 SET_HARD_REG_BIT (counted_for_groups
, j
+ k
);
1728 for (idx
= 0; idx
< FIRST_PSEUDO_REGISTER
; idx
++)
1729 if (potential_reload_regs
[idx
] == j
+ k
)
1732 |= new_spill_reg (idx
, class,
1733 max_needs
, NULL_PTR
,
1737 /* We have found one that will complete a group,
1738 so count off one group as provided. */
1739 max_groups
[class]--;
1740 p
= reg_class_superclasses
[class];
1741 while (*p
!= LIM_REG_CLASSES
)
1742 max_groups
[(int) *p
++]--;
1748 /* We couldn't find any registers for this reload.
1749 Avoid going into an infinite loop. */
1750 if (i
>= FIRST_PSEUDO_REGISTER
)
1752 /* There are no groups left. */
1753 spill_failure (max_groups_insn
[class]);
1760 /* Now similarly satisfy all need for single registers. */
1762 while (max_needs
[class] > 0 || max_nongroups
[class] > 0)
1764 #ifdef SMALL_REGISTER_CLASSES
1765 /* This should be right for all machines, but only the 386
1766 is known to need it, so this conditional plays safe.
1767 ??? For 2.5, try making this unconditional. */
1768 /* If we spilled enough regs, but they weren't counted
1769 against the non-group need, see if we can count them now.
1770 If so, we can avoid some actual spilling. */
1771 if (max_needs
[class] <= 0 && max_nongroups
[class] > 0)
1772 for (i
= 0; i
< n_spills
; i
++)
1773 if (TEST_HARD_REG_BIT (reg_class_contents
[class],
1775 && !TEST_HARD_REG_BIT (counted_for_groups
,
1777 && !TEST_HARD_REG_BIT (counted_for_nongroups
,
1779 && max_nongroups
[class] > 0)
1781 register enum reg_class
*p
;
1783 SET_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]);
1784 max_nongroups
[class]--;
1785 p
= reg_class_superclasses
[class];
1786 while (*p
!= LIM_REG_CLASSES
)
1787 max_nongroups
[(int) *p
++]--;
1789 if (max_needs
[class] <= 0 && max_nongroups
[class] <= 0)
1793 /* Consider the potential reload regs that aren't
1794 yet in use as reload regs, in order of preference.
1795 Find the most preferred one that's in this class. */
1797 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1798 if (potential_reload_regs
[i
] >= 0
1799 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1800 potential_reload_regs
[i
])
		/* If this reg will not be available for groups,
		   pick one that does not foreclose possible groups.
		   This is a kludge, and not very general,
		   but it should be sufficient to make the 386 work,
		   and the problem should not occur on machines with
		   reasonable register classes anyway.  */
1807 && (max_nongroups
[class] == 0
1808 || possible_group_p (potential_reload_regs
[i
], max_groups
)))
1811 /* If we couldn't get a register, try to get one even if we
1812 might foreclose possible groups. This may cause problems
1813 later, but that's better than aborting now, since it is
1814 possible that we will, in fact, be able to form the needed
1815 group even with this allocation. */
1817 if (i
>= FIRST_PSEUDO_REGISTER
1818 && (asm_noperands (max_needs
[class] > 0
1819 ? max_needs_insn
[class]
1820 : max_nongroups_insn
[class])
1822 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1823 if (potential_reload_regs
[i
] >= 0
1824 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1825 potential_reload_regs
[i
]))
1828 /* I should be the index in potential_reload_regs
1829 of the new reload reg we have found. */
1831 if (i
>= FIRST_PSEUDO_REGISTER
)
1833 /* There are no possible registers left to spill. */
1834 spill_failure (max_needs
[class] > 0 ? max_needs_insn
[class]
1835 : max_nongroups_insn
[class]);
1841 |= new_spill_reg (i
, class, max_needs
, max_nongroups
,
  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
1850 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1851 if (ep
->can_eliminate
)
1852 mark_elimination (ep
->from
, ep
->to
);
  /* Insert code to save and restore call-clobbered hard regs
     around calls.  Tell it what mode to use so that we will process
     those insns in reload_as_needed if we have to.  */
1858 if (caller_save_needed
)
1859 save_call_clobbered_regs (num_eliminable
? QImode
1860 : caller_save_spill_class
!= NO_REGS
? HImode
  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
     If that insn didn't set the register (i.e., it copied the register to
     memory), just delete that insn instead of the equivalencing insn plus
     anything now dead.  If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there and that is incorrect.  */
1870 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1871 if (reg_renumber
[i
] < 0 && reg_equiv_init
[i
] != 0
1872 && GET_CODE (reg_equiv_init
[i
]) != NOTE
)
1874 if (reg_set_p (regno_reg_rtx
[i
], PATTERN (reg_equiv_init
[i
])))
1875 delete_dead_insn (reg_equiv_init
[i
]);
1878 PUT_CODE (reg_equiv_init
[i
], NOTE
);
1879 NOTE_SOURCE_FILE (reg_equiv_init
[i
]) = 0;
1880 NOTE_LINE_NUMBER (reg_equiv_init
[i
]) = NOTE_INSN_DELETED
;
1884 /* Use the reload registers where necessary
1885 by generating move instructions to move the must-be-register
1886 values into or out of the reload registers. */
1888 if (something_needs_reloads
|| something_needs_elimination
1889 || (caller_save_needed
&& num_eliminable
)
1890 || caller_save_spill_class
!= NO_REGS
)
1891 reload_as_needed (first
, global
);
  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */
1899 if (! frame_pointer_needed
)
1900 for (i
= 0; i
< n_basic_blocks
; i
++)
1901 basic_block_live_at_start
[i
][HARD_FRAME_POINTER_REGNUM
/ REGSET_ELT_BITS
]
1902 &= ~ ((REGSET_ELT_TYPE
) 1 << (HARD_FRAME_POINTER_REGNUM
1903 % REGSET_ELT_BITS
));
1905 /* Come here (with failure set nonzero) if we can't get enough spill regs
1906 and we decide not to abort about it. */
1909 reload_in_progress
= 0;
1911 /* Now eliminate all pseudo regs by modifying them into
1912 their equivalent memory references.
1913 The REG-rtx's for the pseudos are modified in place,
1914 so all insns that used to refer to them now refer to memory.
1916 For a reg that has a reg_equiv_address, all those insns
1917 were changed by reloading so that no insns refer to it any longer;
1918 but the DECL_RTL of a variable decl may refer to it,
1919 and if so this causes the debugging info to mention the variable. */
1921 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1925 if (reg_equiv_mem
[i
])
1927 addr
= XEXP (reg_equiv_mem
[i
], 0);
1928 in_struct
= MEM_IN_STRUCT_P (reg_equiv_mem
[i
]);
1930 if (reg_equiv_address
[i
])
1931 addr
= reg_equiv_address
[i
];
1934 if (reg_renumber
[i
] < 0)
1936 rtx reg
= regno_reg_rtx
[i
];
1937 XEXP (reg
, 0) = addr
;
1938 REG_USERVAR_P (reg
) = 0;
1939 MEM_IN_STRUCT_P (reg
) = in_struct
;
1940 PUT_CODE (reg
, MEM
);
1942 else if (reg_equiv_mem
[i
])
1943 XEXP (reg_equiv_mem
[i
], 0) = addr
;
1947 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1948 /* Make a pass over all the insns and remove death notes for things that
1949 are no longer registers or no longer die in the insn (e.g., an input
1950 and output pseudo being tied). */
1952 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1953 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1957 for (note
= REG_NOTES (insn
); note
; note
= next
)
1959 next
= XEXP (note
, 1);
1960 if (REG_NOTE_KIND (note
) == REG_DEAD
1961 && (GET_CODE (XEXP (note
, 0)) != REG
1962 || reg_set_p (XEXP (note
, 0), PATTERN (insn
))))
1963 remove_note (insn
, note
);
1968 /* Indicate that we no longer have known memory locations or constants. */
1969 reg_equiv_constant
= 0;
1970 reg_equiv_memory_loc
= 0;
1973 free (scratch_list
);
1976 free (scratch_block
);
1982 /* Nonzero if, after spilling reg REGNO for non-groups,
1983 it will still be possible to find a group if we still need one. */
1986 possible_group_p (regno
, max_groups
)
1991 int class = (int) NO_REGS
;
1993 for (i
= 0; i
< (int) N_REG_CLASSES
; i
++)
1994 if (max_groups
[i
] > 0)
2000 if (class == (int) NO_REGS
)
2003 /* Consider each pair of consecutive registers. */
2004 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
- 1; i
++)
2006 /* Ignore pairs that include reg REGNO. */
2007 if (i
== regno
|| i
+ 1 == regno
)
2010 /* Ignore pairs that are outside the class that needs the group.
2011 ??? Here we fail to handle the case where two different classes
2012 independently need groups. But this never happens with our
2013 current machine descriptions. */
2014 if (! (TEST_HARD_REG_BIT (reg_class_contents
[class], i
)
2015 && TEST_HARD_REG_BIT (reg_class_contents
[class], i
+ 1)))
2018 /* A pair of consecutive regs we can still spill does the trick. */
2019 if (spill_reg_order
[i
] < 0 && spill_reg_order
[i
+ 1] < 0
2020 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2021 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1))
2024 /* A pair of one already spilled and one we can spill does it
2025 provided the one already spilled is not otherwise reserved. */
2026 if (spill_reg_order
[i
] < 0
2027 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2028 && spill_reg_order
[i
+ 1] >= 0
2029 && ! TEST_HARD_REG_BIT (counted_for_groups
, i
+ 1)
2030 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, i
+ 1))
2032 if (spill_reg_order
[i
+ 1] < 0
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1)
2034 && spill_reg_order
[i
] >= 0
2035 && ! TEST_HARD_REG_BIT (counted_for_groups
, i
)
2036 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, i
))
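/* The pair tests above can be pictured with plain arrays.  The sketch
   below is illustrative only and is not used by this pass: ORDER plays
   the role of spill_reg_order (negative means the reg may still be
   spilled), BAD stands in for bad_spill_regs, and RESERVED for the
   counted_for_groups/counted_for_nongroups sets.  */

static int
example_pair_can_form_group (i, order, bad, reserved)
     int i;
     int *order, *bad, *reserved;
{
  /* Both regs of the pair are still available for spilling.  */
  if (order[i] < 0 && order[i + 1] < 0
      && ! bad[i] && ! bad[i + 1])
    return 1;

  /* One reg is already spilled but not otherwise reserved,
     and its neighbor can still be spilled.  */
  if (order[i] < 0 && ! bad[i]
      && order[i + 1] >= 0 && ! reserved[i + 1])
    return 1;
  if (order[i + 1] < 0 && ! bad[i + 1]
      && order[i] >= 0 && ! reserved[i])
    return 1;

  return 0;
}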
2043 /* Count any groups that can be formed from the registers recently spilled.
2044 This is done class by class, in order of ascending class number. */
2047 count_possible_groups (group_size
, group_mode
, max_groups
)
2049 enum machine_mode
*group_mode
;
2053 /* Now find all consecutive groups of spilled registers
2054 and mark each group off against the need for such groups.
2055 But don't count them against ordinary need, yet. */
2057 for (i
= 0; i
< N_REG_CLASSES
; i
++)
2058 if (group_size
[i
] > 1)
2063 CLEAR_HARD_REG_SET (new);
2065 /* Make a mask of all the regs that are spill regs in class I. */
2066 for (j
= 0; j
< n_spills
; j
++)
2067 if (TEST_HARD_REG_BIT (reg_class_contents
[i
], spill_regs
[j
])
2068 && ! TEST_HARD_REG_BIT (counted_for_groups
, spill_regs
[j
])
2069 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
2071 SET_HARD_REG_BIT (new, spill_regs
[j
]);
2073 /* Find each consecutive group of them. */
2074 for (j
= 0; j
< FIRST_PSEUDO_REGISTER
&& max_groups
[i
] > 0; j
++)
2075 if (TEST_HARD_REG_BIT (new, j
)
2076 && j
+ group_size
[i
] <= FIRST_PSEUDO_REGISTER
2077 /* Next line in case group-mode for this class
2078 demands an even-odd pair. */
2079 && HARD_REGNO_MODE_OK (j
, group_mode
[i
]))
2082 for (k
= 1; k
< group_size
[i
]; k
++)
2083 if (! TEST_HARD_REG_BIT (new, j
+ k
))
2085 if (k
== group_size
[i
])
2087 /* We found a group. Mark it off against this class's
2088 need for groups, and against each superclass too. */
2089 register enum reg_class
*p
;
2091 p
= reg_class_superclasses
[i
];
2092 while (*p
!= LIM_REG_CLASSES
)
2093 max_groups
[(int) *p
++]--;
2094 /* Don't count these registers again. */
2095 for (k
= 0; k
< group_size
[i
]; k
++)
2096 SET_HARD_REG_BIT (counted_for_groups
, j
+ k
);
	      /* Skip to the last reg in this group.  When j is incremented
		 above, it will then point to the first reg of the next
		 possible group.  */
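/* A self-contained sketch of the consecutive-run scan that
   count_possible_groups performs, written against a plain byte mask
   instead of a HARD_REG_SET.  MASK[r] is nonzero if reg R was spilled
   for the class and has not been counted yet; SIZE is the group size.
   The names are made up for illustration.  */

static int
example_count_runs (mask, nregs, size)
     char *mask;
     int nregs, size;
{
  int j, k, groups = 0;

  for (j = 0; j + size <= nregs; j++)
    if (mask[j])
      {
	for (k = 1; k < size; k++)
	  if (! mask[j + k])
	    break;
	if (k == size)
	  {
	    /* Found a full run; consume its regs so they are not
	       counted again, then skip past the run.  */
	    groups++;
	    for (k = 0; k < size; k++)
	      mask[j + k] = 0;
	    j += size - 1;
	  }
      }
  return groups;
}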
/* ALLOCATE_MODE is a register mode that needs to be reloaded.  OTHER_MODE is
   another mode that needs to be reloaded for the same register class CLASS.
   If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
   ALLOCATE_MODE will never be smaller than OTHER_MODE.

   This code used to also fail if any reg in CLASS allows OTHER_MODE but not
   ALLOCATE_MODE.  This test is unnecessary, because we will never try to put
   something of mode ALLOCATE_MODE into an OTHER_MODE register.  Testing this
   causes unnecessary failures on machines requiring alignment of register
   groups when the two modes are different sizes, because the larger mode has
   more strict alignment rules than the smaller mode.  */

static int
modes_equiv_for_class_p (allocate_mode, other_mode, class)
     enum machine_mode allocate_mode, other_mode;
     enum reg_class class;
{
  register int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
	  && HARD_REGNO_MODE_OK (regno, allocate_mode)
	  && ! HARD_REGNO_MODE_OK (regno, other_mode))
	return 0;
    }
  return 1;
}
2135 /* Handle the failure to find a register to spill.
2136 INSN should be one of the insns which needed this particular spill reg. */
2139 spill_failure (insn
)
2142 if (asm_noperands (PATTERN (insn
)) >= 0)
2143 error_for_asm (insn
, "`asm' needs too many reloads");
2148 /* Add a new register to the tables of available spill-registers
2149 (as well as spilling all pseudos allocated to the register).
2150 I is the index of this register in potential_reload_regs.
2151 CLASS is the regclass whose need is being satisfied.
2152 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2153 so that this register can count off against them.
2154 MAX_NONGROUPS is 0 if this register is part of a group.
2155 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2158 new_spill_reg (i
, class, max_needs
, max_nongroups
, global
, dumpfile
)
2166 register enum reg_class
*p
;
2168 int regno
= potential_reload_regs
[i
];
2170 if (i
>= FIRST_PSEUDO_REGISTER
)
2171 abort (); /* Caller failed to find any register. */
2173 if (fixed_regs
[regno
] || TEST_HARD_REG_BIT (forbidden_regs
, regno
))
2174 fatal ("fixed or forbidden register was spilled.\n\
2175 This may be due to a compiler bug or to impossible asm\n\
2176 statements or clauses.");
2178 /* Make reg REGNO an additional reload reg. */
2180 potential_reload_regs
[i
] = -1;
2181 spill_regs
[n_spills
] = regno
;
2182 spill_reg_order
[regno
] = n_spills
;
2184 fprintf (dumpfile
, "Spilling reg %d.\n", spill_regs
[n_spills
]);
2186 /* Clear off the needs we just satisfied. */
2189 p
= reg_class_superclasses
[class];
2190 while (*p
!= LIM_REG_CLASSES
)
2191 max_needs
[(int) *p
++]--;
2193 if (max_nongroups
&& max_nongroups
[class] > 0)
2195 SET_HARD_REG_BIT (counted_for_nongroups
, regno
);
2196 max_nongroups
[class]--;
2197 p
= reg_class_superclasses
[class];
2198 while (*p
!= LIM_REG_CLASSES
)
2199 max_nongroups
[(int) *p
++]--;
2202 /* Spill every pseudo reg that was allocated to this reg
2203 or to something that overlaps this reg. */
2205 val
= spill_hard_reg (spill_regs
[n_spills
], global
, dumpfile
, 0);
2207 /* If there are some registers still to eliminate and this register
2208 wasn't ever used before, additional stack space may have to be
2209 allocated to store this register. Thus, we may have changed the offset
2210 between the stack and frame pointers, so mark that something has changed.
2211 (If new pseudos were spilled, thus requiring more space, VAL would have
2212 been set non-zero by the call to spill_hard_reg above since additional
2213 reloads may be needed in that case.
2215 One might think that we need only set VAL to 1 if this is a call-used
2216 register. However, the set of registers that must be saved by the
2217 prologue is not identical to the call-used set. For example, the
2218 register used by the call insn for the return PC is a call-used register,
2219 but must be saved by the prologue. */
2220 if (num_eliminable
&& ! regs_ever_live
[spill_regs
[n_spills
]])
2223 regs_ever_live
[spill_regs
[n_spills
]] = 1;
/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
   data that is dead in INSN.  */

static void
delete_dead_insn (insn)
     rtx insn;
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
    delete_dead_insn (prev);

  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}
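/* The recursion above can be pictured on a plain linked list.  The
   sketch below is illustrative only and shares no code with the real
   pass: each node "sets" a value, and a node is deleted together with
   any earlier node whose only job was to produce a value that dies
   here.  */

struct example_insn
{
  struct example_insn *prev;
  int value_from_prev_dies_here;	/* nonzero if PREV's result is used
					   only here and dies here */
  int deleted;
};

static void
example_delete_dead_insn (insn)
     struct example_insn *insn;
{
  if (insn->prev && insn->value_from_prev_dies_here
      && ! insn->prev->deleted)
    example_delete_dead_insn (insn->prev);
  insn->deleted = 1;
}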
2252 /* Modify the home of pseudo-reg I.
2253 The new home is present in reg_renumber[I].
2255 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2256 or it may be -1, meaning there is none or it is not relevant.
2257 This is used so that all pseudos spilled from a given hard reg
2258 can share one stack slot. */
2261 alter_reg (i
, from_reg
)
2265 /* When outputting an inline function, this can happen
2266 for a reg that isn't actually used. */
2267 if (regno_reg_rtx
[i
] == 0)
2270 /* If the reg got changed to a MEM at rtl-generation time,
2272 if (GET_CODE (regno_reg_rtx
[i
]) != REG
)
2275 /* Modify the reg-rtx to contain the new hard reg
2276 number or else to contain its pseudo reg number. */
2277 REGNO (regno_reg_rtx
[i
])
2278 = reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
;
2280 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2281 allocate a stack slot for it. */
2283 if (reg_renumber
[i
] < 0
2284 && reg_n_refs
[i
] > 0
2285 && reg_equiv_constant
[i
] == 0
2286 && reg_equiv_memory_loc
[i
] == 0)
2289 int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2290 int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
2303 /* No known place to spill from => no slot to reuse. */
2304 x
= assign_stack_local (GET_MODE (regno_reg_rtx
[i
]), total_size
, -1);
2305 #if BYTES_BIG_ENDIAN
      /* Cancel the big-endian correction done in assign_stack_local.
	 Get the address of the beginning of the slot.
	 This is so we can do a big-endian correction unconditionally
	 below.  */
2310 adjust
= inherent_size
- total_size
;
2313 /* Reuse a stack slot if possible. */
2314 else if (spill_stack_slot
[from_reg
] != 0
2315 && spill_stack_slot_width
[from_reg
] >= total_size
2316 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2318 x
= spill_stack_slot
[from_reg
];
2319 /* Allocate a bigger slot. */
2322 /* Compute maximum size needed, both for inherent size
2323 and for total size. */
2324 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2325 if (spill_stack_slot
[from_reg
])
2327 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2329 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2330 if (spill_stack_slot_width
[from_reg
] > total_size
)
2331 total_size
= spill_stack_slot_width
[from_reg
];
2333 /* Make a slot with that size. */
2334 x
= assign_stack_local (mode
, total_size
, -1);
2335 #if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
2340 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2342 spill_stack_slot
[from_reg
] = x
;
2343 spill_stack_slot_width
[from_reg
] = total_size
;
2346 #if BYTES_BIG_ENDIAN
2347 /* On a big endian machine, the "address" of the slot
2348 is the address of the low part that fits its inherent mode. */
2349 if (inherent_size
< total_size
)
2350 adjust
+= (total_size
- inherent_size
);
2351 #endif /* BYTES_BIG_ENDIAN */
2353 /* If we have any adjustment to make, or if the stack slot is the
2354 wrong mode, make a new stack slot. */
2355 if (adjust
!= 0 || GET_MODE (x
) != GET_MODE (regno_reg_rtx
[i
]))
2357 x
= gen_rtx (MEM
, GET_MODE (regno_reg_rtx
[i
]),
2358 plus_constant (XEXP (x
, 0), adjust
));
2359 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2362 /* Save the stack slot for later. */
2363 reg_equiv_memory_loc
[i
] = x
;
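/* Self-contained sketch of the slot-size bookkeeping done in alter_reg.
   A pseudo's inherent size comes from its own mode; its total size also
   leaves room for paradoxical subregs that access it in wider modes.
   When a narrower value lives in a wider slot, a big-endian target
   addresses it at the high end of the slot.  The helper below only
   illustrates that arithmetic and is not called by the pass.  */

static int
example_slot_offset (inherent_size, total_size, big_endian)
     int inherent_size, total_size, big_endian;
{
  /* A slot can be reused only if it is at least as large in both
     senses, so a caller would take the maximum of the old and new
     sizes before computing where the value sits within the slot.  */
  if (big_endian && inherent_size < total_size)
    return total_size - inherent_size;
  return 0;
}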
/* Mark the slots in regs_ever_live for the hard regs
   used by pseudo-reg number REGNO.  */

void
mark_home_live (regno)
     int regno;
{
  register int i, lim;

  i = reg_renumber[regno];
  if (i < 0)
    return;
  lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
  while (i < lim)
    regs_ever_live[i++] = 1;
}
/* Mark the registers used in SCRATCH as being live.  */

static void
mark_scratch_live (scratch)
     rtx scratch;
{
  register int i;
  int regno = REGNO (scratch);
  int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));

  for (i = regno; i < lim; i++)
    regs_ever_live[i] = 1;
}
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */
2408 set_label_offsets (x
, insn
, initial_p
)
2413 enum rtx_code code
= GET_CODE (x
);
2416 struct elim_table
*p
;
2421 if (LABEL_REF_NONLOCAL_P (x
))
2426 /* ... fall through ... */
2429 /* If we know nothing about this label, set the desired offsets. Note
2430 that this sets the offset at a label to be the offset before a label
2431 if we don't know anything about the label. This is not correct for
2432 the label after a BARRIER, but is the best guess we can make. If
2433 we guessed wrong, we will suppress an elimination that might have
2434 been possible had we been able to guess correctly. */
2436 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
)])
2438 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2439 offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2440 = (initial_p
? reg_eliminate
[i
].initial_offset
2441 : reg_eliminate
[i
].offset
);
2442 offsets_known_at
[CODE_LABEL_NUMBER (x
)] = 1;
      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */
2450 && (tem
= prev_nonnote_insn (insn
)) != 0
2451 && GET_CODE (tem
) == BARRIER
)
2453 num_not_at_initial_offset
= 0;
2454 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2456 reg_eliminate
[i
].offset
= reg_eliminate
[i
].previous_offset
2457 = offsets_at
[CODE_LABEL_NUMBER (x
)][i
];
2458 if (reg_eliminate
[i
].can_eliminate
2459 && (reg_eliminate
[i
].offset
2460 != reg_eliminate
[i
].initial_offset
))
2461 num_not_at_initial_offset
++;
2466 /* If neither of the above cases is true, compare each offset
2467 with those previously recorded and suppress any eliminations
2468 where the offsets disagree. */
2470 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2471 if (offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2472 != (initial_p
? reg_eliminate
[i
].initial_offset
2473 : reg_eliminate
[i
].offset
))
2474 reg_eliminate
[i
].can_eliminate
= 0;
2479 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2481 /* ... fall through ... */
2485 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2486 and hence must have all eliminations at their initial offsets. */
2487 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2488 if (REG_NOTE_KIND (tem
) == REG_LABEL
)
2489 set_label_offsets (XEXP (tem
, 0), insn
, 1);
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */
2498 for (i
= 0; i
< XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2499 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2504 /* We only care about setting PC. If the source is not RETURN,
2505 IF_THEN_ELSE, or a label, disable any eliminations not at
2506 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2507 isn't one of those possibilities. For branches to a label,
2508 call ourselves recursively.
2510 Note that this can disable elimination unnecessarily when we have
2511 a non-local goto since it will look like a non-constant jump to
2512 someplace in the current function. This isn't a significant
2513 problem since such jumps will normally be when all elimination
2514 pairs are back to their initial offsets. */
2516 if (SET_DEST (x
) != pc_rtx
)
2519 switch (GET_CODE (SET_SRC (x
)))
2526 set_label_offsets (XEXP (SET_SRC (x
), 0), insn
, initial_p
);
2530 tem
= XEXP (SET_SRC (x
), 1);
2531 if (GET_CODE (tem
) == LABEL_REF
)
2532 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2533 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2536 tem
= XEXP (SET_SRC (x
), 2);
2537 if (GET_CODE (tem
) == LABEL_REF
)
2538 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2539 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2544 /* If we reach here, all eliminations must be at their initial
2545 offset because we are doing a jump to a variable address. */
2546 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2547 if (p
->offset
!= p
->initial_offset
)
2548 p
->can_eliminate
= 0;
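/* Sketch of the offset bookkeeping that set_label_offsets performs,
   using plain ints instead of the elimination table.  Each label
   records the offset in effect the first time the label is seen; if a
   later path reaches the same label with a different offset, the
   elimination must be given up.  The array names are illustrative and
   do not exist in the real pass.  */

static void
example_note_label_offset (label, offset, known, recorded, can_eliminate)
     int label, offset;
     char *known;
     int *recorded;
     int *can_eliminate;
{
  if (! known[label])
    {
      recorded[label] = offset;
      known[label] = 1;
    }
  else if (recorded[label] != offset)
    /* Two paths disagree about the offset at this label, so the
       replacement register cannot stand in for the eliminated one
       here.  */
    *can_eliminate = 0;
}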
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */
2555 static struct rtvec_def
*old_asm_operands_vec
, *new_asm_operands_vec
;
/* Scan X and replace any eliminable registers (such as fp) with a
   replacement (such as sp), plus an offset.

   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
   MEM, we are allowed to replace a sum of a register and the constant zero
   with the register, which we cannot do outside a MEM.  In addition, we need
   to record the fact that a register is referenced outside a MEM.

   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
   the REG is being modified.

   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
   That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   If we see a modification to a register we know about, take the
   appropriate action (see case SET, below).

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
   replacements done assuming all offsets are at their initial values.  If
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
   encounter, return the actual location so that find_reloads will do
   the proper thing.  */
2586 eliminate_regs (x
, mem_mode
, insn
)
2588 enum machine_mode mem_mode
;
2591 enum rtx_code code
= GET_CODE (x
);
2592 struct elim_table
*ep
;
2617 /* First handle the case where we encounter a bare register that
2618 is eliminable. Replace it with a PLUS. */
2619 if (regno
< FIRST_PSEUDO_REGISTER
)
2621 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2623 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2626 /* Refs inside notes don't count for this purpose. */
2627 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2628 || GET_CODE (insn
) == INSN_LIST
)))
2629 ep
->ref_outside_mem
= 1;
2630 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2634 else if (reg_equiv_memory_loc
&& reg_equiv_memory_loc
[regno
]
2635 && (reg_equiv_address
[regno
] || num_not_at_initial_offset
))
	  /* In this case, find_reloads would attempt to either use an
	     incorrect address (if something is not at its initial offset)
	     or substitute a replaced address into an insn (which loses
	     if the offset is changed by some later action).  So we simply
	     return the replaced stack slot (assuming it is changed by
	     elimination) and ignore the fact that this is actually a
	     reference to the pseudo.  Ensure we make a copy of the
	     address in case it is shared.  */
2645 new = eliminate_regs (reg_equiv_memory_loc
[regno
],
2647 if (new != reg_equiv_memory_loc
[regno
])
2649 cannot_omit_stores
[regno
] = 1;
2650 return copy_rtx (new);
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
2658 if (GET_CODE (XEXP (x
, 0)) == REG
2659 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2660 && CONSTANT_P (XEXP (x
, 1)))
2662 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2664 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2667 /* Refs inside notes don't count for this purpose. */
2668 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2669 || GET_CODE (insn
) == INSN_LIST
)))
2670 ep
->ref_outside_mem
= 1;
2672 /* The only time we want to replace a PLUS with a REG (this
2673 occurs when the constant operand of the PLUS is the negative
2674 of the offset) is when we are inside a MEM. We won't want
2675 to do so at other times because that would change the
2676 structure of the insn in a way that reload can't handle.
2677 We special-case the commonest situation in
2678 eliminate_regs_in_insn, so just replace a PLUS with a
2679 PLUS here, unless inside a MEM. */
2680 if (mem_mode
!= 0 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2681 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2684 return gen_rtx (PLUS
, Pmode
, ep
->to_rtx
,
2685 plus_constant (XEXP (x
, 1),
2686 ep
->previous_offset
));
2689 /* If the register is not eliminable, we are done since the other
2690 operand is a constant. */
      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 We assume here this is part of an address (or a "load address" insn)
	 since an eliminable register is not likely to appear in any other
	 context.

	 If we have (plus (eliminable) (reg)), we want to produce
	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
	 normal add insn, (plus (replacement) (reg)) will be pushed as a
	 reload.  This is the desired action.  */
2708 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2709 rtx new1
= eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2711 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2713 /* If one side is a PLUS and the other side is a pseudo that
2714 didn't get a hard register but has a reg_equiv_constant,
2715 we must replace the constant here since it may no longer
2716 be in the position of any operand. */
2717 if (GET_CODE (new0
) == PLUS
&& GET_CODE (new1
) == REG
2718 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2719 && reg_renumber
[REGNO (new1
)] < 0
2720 && reg_equiv_constant
!= 0
2721 && reg_equiv_constant
[REGNO (new1
)] != 0)
2722 new1
= reg_equiv_constant
[REGNO (new1
)];
2723 else if (GET_CODE (new1
) == PLUS
&& GET_CODE (new0
) == REG
2724 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2725 && reg_renumber
[REGNO (new0
)] < 0
2726 && reg_equiv_constant
[REGNO (new0
)] != 0)
2727 new0
= reg_equiv_constant
[REGNO (new0
)];
2729 new = form_sum (new0
, new1
);
2731 /* As above, if we are not inside a MEM we do not want to
2732 turn a PLUS into something else. We might try to do so here
2733 for an addition of 0 if we aren't optimizing. */
2734 if (! mem_mode
&& GET_CODE (new) != PLUS
)
2735 return gen_rtx (PLUS
, GET_MODE (x
), new, const0_rtx
);
      /* If this is the product of an eliminable register and a
	 constant, apply the distributive law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.  This case is pathological.
	 We ignore the possibility of overflow here.  */
2748 if (GET_CODE (XEXP (x
, 0)) == REG
2749 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2750 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2751 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2753 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2756 /* Refs inside notes don't count for this purpose. */
2757 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2758 || GET_CODE (insn
) == INSN_LIST
)))
2759 ep
->ref_outside_mem
= 1;
2762 plus_constant (gen_rtx (MULT
, Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
2763 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
2766 /* ... fall through ... */
2771 case DIV
: case UDIV
:
2772 case MOD
: case UMOD
:
2773 case AND
: case IOR
: case XOR
:
2774 case ROTATERT
: case ROTATE
:
2775 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2777 case GE
: case GT
: case GEU
: case GTU
:
2778 case LE
: case LT
: case LEU
: case LTU
:
2780 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2782 = XEXP (x
, 1) ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
) : 0;
2784 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2785 return gen_rtx (code
, GET_MODE (x
), new0
, new1
);
2790 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2793 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2794 if (new != XEXP (x
, 0))
2795 x
= gen_rtx (EXPR_LIST
, REG_NOTE_KIND (x
), new, XEXP (x
, 1));
2798 /* ... fall through ... */
2801 /* Now do eliminations in the rest of the chain. If this was
2802 an EXPR_LIST, this might result in allocating more memory than is
2803 strictly needed, but it simplifies the code. */
2806 new = eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2807 if (new != XEXP (x
, 1))
2808 return gen_rtx (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new);
2816 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2817 if (ep
->to_rtx
== XEXP (x
, 0))
2819 int size
= GET_MODE_SIZE (mem_mode
);
2821 /* If more bytes than MEM_MODE are pushed, account for them. */
2822 #ifdef PUSH_ROUNDING
2823 if (ep
->to_rtx
== stack_pointer_rtx
)
2824 size
= PUSH_ROUNDING (size
);
2826 if (code
== PRE_DEC
|| code
== POST_DEC
)
2832 /* Fall through to generic unary operation case. */
2834 case STRICT_LOW_PART
:
2836 case SIGN_EXTEND
: case ZERO_EXTEND
:
2837 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2838 case FLOAT
: case FIX
:
2839 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2843 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2844 if (new != XEXP (x
, 0))
2845 return gen_rtx (code
, GET_MODE (x
), new);
2849 /* Similar to above processing, but preserve SUBREG_WORD.
2850 Convert (subreg (mem)) to (mem) if not paradoxical.
2851 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2852 pseudo didn't get a hard reg, we must replace this with the
2853 eliminated version of the memory location because push_reloads
2854 may do the replacement in certain circumstances. */
2855 if (GET_CODE (SUBREG_REG (x
)) == REG
2856 && (GET_MODE_SIZE (GET_MODE (x
))
2857 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2858 && reg_equiv_memory_loc
!= 0
2859 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2861 new = eliminate_regs (reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))],
2864 /* If we didn't change anything, we must retain the pseudo. */
2865 if (new == reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))])
	  /* Otherwise, ensure NEW isn't shared in case we have to reload
	     it.  */
2870 new = copy_rtx (new);
2873 new = eliminate_regs (SUBREG_REG (x
), mem_mode
, insn
);
2875 if (new != XEXP (x
, 0))
2877 if (GET_CODE (new) == MEM
2878 && (GET_MODE_SIZE (GET_MODE (x
))
2879 <= GET_MODE_SIZE (GET_MODE (new)))
2880 #ifdef LOAD_EXTEND_OP
2881 /* On these machines we will be reloading what is
2882 inside the SUBREG if it originally was a pseudo and
2883 the inner and outer modes are both a word or
2884 smaller. So leave the SUBREG then. */
2885 && ! (GET_CODE (SUBREG_REG (x
)) == REG
2886 && GET_MODE_SIZE (GET_MODE (x
)) <= UNITS_PER_WORD
2887 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
)
2891 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2892 enum machine_mode mode
= GET_MODE (x
);
2894 #if BYTES_BIG_ENDIAN
2895 offset
+= (MIN (UNITS_PER_WORD
,
2896 GET_MODE_SIZE (GET_MODE (new)))
2897 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2900 PUT_MODE (new, mode
);
2901 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset
);
2905 return gen_rtx (SUBREG
, GET_MODE (x
), new, SUBREG_WORD (x
));
2911 /* If clobbering a register that is the replacement register for an
2912 elimination we still think can be performed, note that it cannot
2913 be performed. Otherwise, we need not be concerned about it. */
2914 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2915 if (ep
->to_rtx
== XEXP (x
, 0))
2916 ep
->can_eliminate
= 0;
2918 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2919 if (new != XEXP (x
, 0))
2920 return gen_rtx (code
, GET_MODE (x
), new);
2926 /* Properly handle sharing input and constraint vectors. */
2927 if (ASM_OPERANDS_INPUT_VEC (x
) != old_asm_operands_vec
)
2929 /* When we come to a new vector not seen before,
2930 scan all its elements; keep the old vector if none
2931 of them changes; otherwise, make a copy. */
2932 old_asm_operands_vec
= ASM_OPERANDS_INPUT_VEC (x
);
2933 temp_vec
= (rtx
*) alloca (XVECLEN (x
, 3) * sizeof (rtx
));
2934 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
2935 temp_vec
[i
] = eliminate_regs (ASM_OPERANDS_INPUT (x
, i
),
2938 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
2939 if (temp_vec
[i
] != ASM_OPERANDS_INPUT (x
, i
))
2942 if (i
== ASM_OPERANDS_INPUT_LENGTH (x
))
2943 new_asm_operands_vec
= old_asm_operands_vec
;
2945 new_asm_operands_vec
2946 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x
), temp_vec
);
2949 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2950 if (new_asm_operands_vec
== old_asm_operands_vec
)
2953 new = gen_rtx (ASM_OPERANDS
, VOIDmode
, ASM_OPERANDS_TEMPLATE (x
),
2954 ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
2955 ASM_OPERANDS_OUTPUT_IDX (x
), new_asm_operands_vec
,
2956 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x
),
2957 ASM_OPERANDS_SOURCE_FILE (x
),
2958 ASM_OPERANDS_SOURCE_LINE (x
));
2959 new->volatil
= x
->volatil
;
2964 /* Check for setting a register that we know about. */
2965 if (GET_CODE (SET_DEST (x
)) == REG
)
2967 /* See if this is setting the replacement register for an
2970 If DEST is the hard frame pointer, we do nothing because we
2971 assume that all assignments to the frame pointer are for
2972 non-local gotos and are being done at a time when they are valid
2973 and do not disturb anything else. Some machines want to
2974 eliminate a fake argument pointer (or even a fake frame pointer)
2975 with either the real frame or the stack pointer. Assignments to
2976 the hard frame pointer must not prevent this elimination. */
2978 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2980 if (ep
->to_rtx
== SET_DEST (x
)
2981 && SET_DEST (x
) != hard_frame_pointer_rtx
)
2983 /* If it is being incremented, adjust the offset. Otherwise,
2984 this elimination can't be done. */
2985 rtx src
= SET_SRC (x
);
2987 if (GET_CODE (src
) == PLUS
2988 && XEXP (src
, 0) == SET_DEST (x
)
2989 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
2990 ep
->offset
-= INTVAL (XEXP (src
, 1));
2992 ep
->can_eliminate
= 0;
2995 /* Now check to see we are assigning to a register that can be
2996 eliminated. If so, it must be as part of a PARALLEL, since we
2997 will not have been called if this is a single SET. So indicate
2998 that we can no longer eliminate this reg. */
2999 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3001 if (ep
->from_rtx
== SET_DEST (x
) && ep
->can_eliminate
)
3002 ep
->can_eliminate
= 0;
3005 /* Now avoid the loop below in this common case. */
3007 rtx new0
= eliminate_regs (SET_DEST (x
), 0, insn
);
3008 rtx new1
= eliminate_regs (SET_SRC (x
), 0, insn
);
3010 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3011 write a CLOBBER insn. */
3012 if (GET_CODE (SET_DEST (x
)) == REG
&& GET_CODE (new0
) == MEM
3013 && insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
3014 && GET_CODE (insn
) != INSN_LIST
)
3015 emit_insn_after (gen_rtx (CLOBBER
, VOIDmode
, SET_DEST (x
)), insn
);
3017 if (new0
!= SET_DEST (x
) || new1
!= SET_SRC (x
))
3018 return gen_rtx (SET
, VOIDmode
, new0
, new1
);
3024 /* Our only special processing is to pass the mode of the MEM to our
3025 recursive call and copy the flags. While we are here, handle this
3026 case more efficiently. */
3027 new = eliminate_regs (XEXP (x
, 0), GET_MODE (x
), insn
);
3028 if (new != XEXP (x
, 0))
3030 new = gen_rtx (MEM
, GET_MODE (x
), new);
3031 new->volatil
= x
->volatil
;
3032 new->unchanging
= x
->unchanging
;
3033 new->in_struct
= x
->in_struct
;
  /* Process each of our operands recursively.  If any have changed, make a
     copy.  */
3042 fmt
= GET_RTX_FORMAT (code
);
3043 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3047 new = eliminate_regs (XEXP (x
, i
), mem_mode
, insn
);
3048 if (new != XEXP (x
, i
) && ! copied
)
3050 rtx new_x
= rtx_alloc (code
);
3051 bcopy (x
, new_x
, (sizeof (*new_x
) - sizeof (new_x
->fld
)
3052 + (sizeof (new_x
->fld
[0])
3053 * GET_RTX_LENGTH (code
))));
3059 else if (*fmt
== 'E')
3062 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3064 new = eliminate_regs (XVECEXP (x
, i
, j
), mem_mode
, insn
);
3065 if (new != XVECEXP (x
, i
, j
) && ! copied_vec
)
3067 rtvec new_v
= gen_rtvec_v (XVECLEN (x
, i
),
3068 &XVECEXP (x
, i
, 0));
3071 rtx new_x
= rtx_alloc (code
);
3072 bcopy (x
, new_x
, (sizeof (*new_x
) - sizeof (new_x
->fld
)
3073 + (sizeof (new_x
->fld
[0])
3074 * GET_RTX_LENGTH (code
))));
3078 XVEC (x
, i
) = new_v
;
3081 XVECEXP (x
, i
, j
) = new;
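/* The core replacement performed by eliminate_regs, reduced to integer
   arithmetic: an address written as FROM + CONSTANT becomes
   TO + (CONSTANT + OFFSET), where OFFSET is the current displacement of
   FROM relative to TO.  The helper below only illustrates that identity
   and is not called anywhere in the pass.  */

static long
example_eliminated_address (to_value, constant, offset)
     long to_value, constant, offset;
{
  /* If from_value == to_value + offset, then
     from_value + constant == to_value + (constant + offset).  */
  return to_value + (constant + offset);
}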
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */
3103 eliminate_regs_in_insn (insn
, replace
)
3107 rtx old_body
= PATTERN (insn
);
3110 struct elim_table
*ep
;
3113 push_obstacks (&reload_obstack
, &reload_obstack
);
3115 if (GET_CODE (old_body
) == SET
&& GET_CODE (SET_DEST (old_body
)) == REG
3116 && REGNO (SET_DEST (old_body
)) < FIRST_PSEUDO_REGISTER
)
3118 /* Check for setting an eliminable register. */
3119 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3120 if (ep
->from_rtx
== SET_DEST (old_body
) && ep
->can_eliminate
)
	/* In this case this insn isn't serving a useful purpose.  We
	   will delete it in reload_as_needed once we know that this
	   elimination is, in fact, being done.

	   If REPLACE isn't set, we can't delete this insn, but needn't
	   process it since it won't be used unless something changes.  */
3129 delete_dead_insn (insn
);
  /* Check for (set (reg) (plus (reg from) (offset))) where the offset
     in the insn is the negative of the offset in FROM.  Substitute
     (set (reg) (reg to)) for the insn and change its code.

     We have to do this here, rather than in eliminate_regs, so that we can
     change the insn code.  */
3141 if (GET_CODE (SET_SRC (old_body
)) == PLUS
3142 && GET_CODE (XEXP (SET_SRC (old_body
), 0)) == REG
3143 && GET_CODE (XEXP (SET_SRC (old_body
), 1)) == CONST_INT
)
3144 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3146 if (ep
->from_rtx
== XEXP (SET_SRC (old_body
), 0)
3147 && ep
->can_eliminate
)
3149 /* We must stop at the first elimination that will be used.
3150 If this one would replace the PLUS with a REG, do it
3151 now. Otherwise, quit the loop and let eliminate_regs
3152 do its normal replacement. */
3153 if (ep
->offset
== - INTVAL (XEXP (SET_SRC (old_body
), 1)))
3155 PATTERN (insn
) = gen_rtx (SET
, VOIDmode
,
3156 SET_DEST (old_body
), ep
->to_rtx
);
3157 INSN_CODE (insn
) = -1;
3166 old_asm_operands_vec
= 0;
  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */
3176 new_body
= eliminate_regs (old_body
, 0, replace
? insn
: NULL_RTX
);
3177 if (new_body
!= old_body
)
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
3184 /* Don't copy an asm_operands because (1) there's no need and (2)
3185 copy_rtx can't do it properly when there are multiple outputs. */
3186 if (! replace
&& asm_noperands (old_body
) < 0)
3187 new_body
= copy_rtx (new_body
);
3189 /* If we had a move insn but now we don't, rerecognize it. */
3190 if ((GET_CODE (old_body
) == SET
&& GET_CODE (SET_SRC (old_body
)) == REG
3191 && (GET_CODE (new_body
) != SET
3192 || GET_CODE (SET_SRC (new_body
)) != REG
))
3193 /* If this was a load from or store to memory, compare
3194 the MEM in recog_operand to the one in the insn. If they
3195 are not equal, then rerecognize the insn. */
3196 || (GET_CODE (old_body
) == SET
3197 && ((GET_CODE (SET_SRC (old_body
)) == MEM
3198 && SET_SRC (old_body
) != recog_operand
[1])
3199 || (GET_CODE (SET_DEST (old_body
)) == MEM
3200 && SET_DEST (old_body
) != recog_operand
[0])))
3201 /* If this was an add insn before, rerecognize. */
3203 (GET_CODE (old_body
) == SET
3204 && GET_CODE (SET_SRC (old_body
)) == PLUS
))
3206 if (! validate_change (insn
, &PATTERN (insn
), new_body
, 0))
3207 /* If recognition fails, store the new body anyway.
3208 It's normal to have recognition failures here
3209 due to bizarre memory addresses; reloading will fix them. */
3210 PATTERN (insn
) = new_body
;
3213 PATTERN (insn
) = new_body
;
  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */
3233 num_not_at_initial_offset
= 0;
3234 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3236 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3237 ep
->can_eliminate
= 0;
3239 ep
->ref_outside_mem
= 0;
3241 if (ep
->previous_offset
!= ep
->offset
)
3244 ep
->previous_offset
= ep
->offset
;
3245 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3246 num_not_at_initial_offset
++;
3248 #ifdef STACK_GROWS_DOWNWARD
3249 ep
->max_offset
= MAX (ep
->max_offset
, ep
->offset
);
3251 ep
->max_offset
= MIN (ep
->max_offset
, ep
->offset
);
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the earlier passes.  */
3261 if (val
&& REG_NOTES (insn
) != 0)
3262 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
3270 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3271 replacement we currently believe is valid, mark it as not eliminable if X
3272 modifies DEST in any way other than by adding a constant integer to it.
3274 If DEST is the frame pointer, we do nothing because we assume that
3275 all assignments to the hard frame pointer are nonlocal gotos and are being
3276 done at a time when they are valid and do not disturb anything else.
3277 Some machines want to eliminate a fake argument pointer with either the
3278 frame or stack pointer. Assignments to the hard frame pointer must not
3279 prevent this elimination.
3281 Called via note_stores from reload before starting its passes to scan
3282 the insns of the function. */
3285 mark_not_eliminable (dest
, x
)
  /* A SUBREG of a hard register here is just changing its mode.  We should
     not see a SUBREG of an eliminable hard register, but check just in
     case.  */
3294 if (GET_CODE (dest
) == SUBREG
)
3295 dest
= SUBREG_REG (dest
);
3297 if (dest
== hard_frame_pointer_rtx
)
3300 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3301 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3302 && (GET_CODE (x
) != SET
3303 || GET_CODE (SET_SRC (x
)) != PLUS
3304 || XEXP (SET_SRC (x
), 0) != dest
3305 || GET_CODE (XEXP (SET_SRC (x
), 1)) != CONST_INT
))
3307 reg_eliminate
[i
].can_eliminate_previous
3308 = reg_eliminate
[i
].can_eliminate
= 0;
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */
3326 spill_hard_reg (regno
, global
, dumpfile
, cant_eliminate
)
3332 enum reg_class
class = REGNO_REG_CLASS (regno
);
3333 int something_changed
= 0;
3336 SET_HARD_REG_BIT (forbidden_regs
, regno
);
3338 /* Spill every pseudo reg that was allocated to this reg
3339 or to something that overlaps this reg. */
3341 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3342 if (reg_renumber
[i
] >= 0
3343 && reg_renumber
[i
] <= regno
3345 + HARD_REGNO_NREGS (reg_renumber
[i
],
3346 PSEUDO_REGNO_MODE (i
))
3349 /* If this register belongs solely to a basic block which needed no
3350 spilling of any class that this register is contained in,
3351 leave it be, unless we are spilling this register because
3352 it was a hard register that can't be eliminated. */
3354 if (! cant_eliminate
3355 && basic_block_needs
[0]
3356 && reg_basic_block
[i
] >= 0
3357 && basic_block_needs
[(int) class][reg_basic_block
[i
]] == 0)
3361 for (p
= reg_class_superclasses
[(int) class];
3362 *p
!= LIM_REG_CLASSES
; p
++)
3363 if (basic_block_needs
[(int) *p
][reg_basic_block
[i
]] > 0)
3366 if (*p
== LIM_REG_CLASSES
)
3370 /* Mark it as no longer having a hard register home. */
3371 reg_renumber
[i
] = -1;
3372 /* We will need to scan everything again. */
3373 something_changed
= 1;
3375 retry_global_alloc (i
, forbidden_regs
);
3377 alter_reg (i
, regno
);
3380 if (reg_renumber
[i
] == -1)
3381 fprintf (dumpfile
, " Register %d now on stack.\n\n", i
);
3383 fprintf (dumpfile
, " Register %d now in %d.\n\n",
3384 i
, reg_renumber
[i
]);
3387 for (i
= 0; i
< scratch_list_length
; i
++)
3389 if (scratch_list
[i
] && REGNO (scratch_list
[i
]) == regno
)
3391 if (! cant_eliminate
&& basic_block_needs
[0]
3392 && ! basic_block_needs
[(int) class][scratch_block
[i
]])
3396 for (p
= reg_class_superclasses
[(int) class];
3397 *p
!= LIM_REG_CLASSES
; p
++)
3398 if (basic_block_needs
[(int) *p
][scratch_block
[i
]] > 0)
3401 if (*p
== LIM_REG_CLASSES
)
3404 PUT_CODE (scratch_list
[i
], SCRATCH
);
3405 scratch_list
[i
] = 0;
3406 something_changed
= 1;
3411 return something_changed
;
3414 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3415 Also mark any hard registers used to store user variables as
3416 forbidden from being used for spill registers. */
3419 scan_paradoxical_subregs (x
)
3424 register enum rtx_code code
= GET_CODE (x
);
3429 #ifdef SMALL_REGISTER_CLASSES
3430 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
&& REG_USERVAR_P (x
))
3431 SET_HARD_REG_BIT (forbidden_regs
, REGNO (x
));
3447 if (GET_CODE (SUBREG_REG (x
)) == REG
3448 && GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3449 reg_max_ref_width
[REGNO (SUBREG_REG (x
))]
3450 = GET_MODE_SIZE (GET_MODE (x
));
3454 fmt
= GET_RTX_FORMAT (code
);
3455 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3458 scan_paradoxical_subregs (XEXP (x
, i
));
3459 else if (fmt
[i
] == 'E')
3462 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
3463 scan_paradoxical_subregs (XVECEXP (x
, i
, j
));
static int
hard_reg_use_compare (p1, p2)
     struct hard_reg_n_uses *p1, *p2;
{
  int tem = p1->uses - p2->uses;
  if (tem != 0) return tem;
  /* If regs are equally good, sort by regno,
     so that the results of qsort leave nothing to chance.  */
  return p1->regno - p2->regno;
}
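/* Example of how a comparator in this style is used: qsort the use
   counts so that lightly used registers come first, breaking ties by
   register number for a deterministic order.  The struct and helpers
   below are made up for illustration; the real pass sorts
   hard_reg_n_uses with hard_reg_use_compare right after filling it in,
   as order_regs_for_reload does below.  */

struct example_reg_uses
{
  int uses;
  int regno;
};

static int
example_use_compare (p1, p2)
     struct example_reg_uses *p1, *p2;
{
  int tem = p1->uses - p2->uses;
  if (tem != 0)
    return tem;
  return p1->regno - p2->regno;
}

static void
example_sort_by_uses (v, n)
     struct example_reg_uses *v;
     int n;
{
  /* qsort is the standard library sort already used elsewhere in this
     file; the comparator above is passed in the same style.  */
  qsort (v, n, sizeof v[0], example_use_compare);
}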
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

order_regs_for_reload ()

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      int regno = reg_renumber[i];

          int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
            hard_reg_n_uses[regno++].uses += reg_n_refs[i];
      large += reg_n_refs[i];

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
          hard_reg_n_uses[i].uses += 2 * large + 2;
          SET_HARD_REG_BIT (bad_spill_regs, i);
      else if (regs_explicitly_used[i])
          hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
          /* ??? We are doing this here because of the potential that
             bad code may be generated if a register explicitly used in
             an insn was used as a spill register for that insn.  But
             not using these as spill registers may lose on some machine.
             We'll have to see how this works out.  */
          SET_HARD_REG_BIT (bad_spill_regs, i);

  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
        potential_reload_regs[o++] = regno;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
        potential_reload_regs[o++] = i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
        potential_reload_regs[o++] = i;

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
         sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
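
/* The net effect is that potential_reload_regs lists completely unused
   hard regs first (in REG_ALLOC_ORDER when that is defined, otherwise
   call-used regs before call-saved ones), followed by the regs that do
   hold pseudos in order of increasing use count; fixed, explicitly used,
   and eliminable regs land at the end because of their inflated counts.  */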
/* Used in reload_as_needed to sort the spilled regs.  */

compare_spill_regs (r1, r2)

  return *r1 < *r2 ? -1 : 1;
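
/* This sorts the spill_regs vector into increasing hard register number;
   reload_as_needed does this so that allocate_reload_regs can pack
   registers that are needed for groups.  */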
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

reload_as_needed (first, live_known)

  bzero (spill_reg_rtx, sizeof spill_reg_rtx);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
                                  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
        = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;

  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */

      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
        spill_reg_order[spill_regs[i]] = i;

  for (insn = first; insn;)
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
          && insn == basic_block_head[this_block + 1])

      /* If we pass a label, copy the offsets from the label information
         into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
          num_not_at_initial_offset = 0;
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
              reg_eliminate[i].offset = reg_eliminate[i].previous_offset
                = offsets_at[CODE_LABEL_NUMBER (insn)][i];
              if (reg_eliminate[i].can_eliminate
                  && (reg_eliminate[i].offset
                      != reg_eliminate[i].initial_offset))
                num_not_at_initial_offset++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
          /* Set avoid_return_reg if this is an insn
             that might use the value of a function call.  */
          if (GET_CODE (insn) == CALL_INSN)
              if (GET_CODE (PATTERN (insn)) == SET)
                after_call = SET_DEST (PATTERN (insn));
              else if (GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));

          else if (after_call != 0
                   && !(GET_CODE (PATTERN (insn)) == SET
                        && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
              if (reg_referenced_p (after_call, PATTERN (insn)))
                avoid_return_reg = after_call;
#endif /* SMALL_REGISTER_CLASSES */

          /* If this is a USE and CLOBBER of a MEM, ensure that any
             references to eliminable registers have been removed.  */

          if ((GET_CODE (PATTERN (insn)) == USE
               || GET_CODE (PATTERN (insn)) == CLOBBER)
              && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
            XEXP (XEXP (PATTERN (insn), 0), 0)
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
                                GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

          /* If we need to do register elimination processing, do so.
             This might delete the insn, in which case we are done.  */
          if (num_eliminable && GET_MODE (insn) == QImode)
              eliminate_regs_in_insn (insn, 1);
              if (GET_CODE (insn) == NOTE)

          if (GET_MODE (insn) == VOIDmode)

          /* First find the pseudo regs that must be reloaded for this insn.
             This info is returned in the tables reload_... (see reload.h).
             Also modify the body of INSN by substituting RELOAD
             rtx's for those pseudo regs.  */

              bzero (reg_has_output_reload, max_regno);
              CLEAR_HARD_REG_SET (reg_is_output_reload);

              find_reloads (insn, 1, spill_indirect_levels, live_known,

                  rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);

                  /* If this block has not had spilling done for a
                     particular class and we have any non-optionals that need a
                     spill reg in that class, abort.  */

                  for (class = 0; class < N_REG_CLASSES; class++)
                    if (basic_block_needs[class] != 0
                        && basic_block_needs[class][this_block] == 0)
                      for (i = 0; i < n_reloads; i++)
                        if (class == (int) reload_reg_class[i]
                            && reload_reg_rtx[i] == 0
                            && ! reload_optional[i]
                            && (reload_in[i] != 0 || reload_out[i] != 0
                                || reload_secondary_p[i] != 0))

                  /* Now compute which reload regs to reload them into.  Perhaps
                     reusing reload regs from previous insns, or else output
                     load insns to reload them.  Maybe output store insns too.
                     Record the choices of reload reg in reload_reg_rtx.  */
                  choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
                  /* Merge any reloads that we didn't combine for fear of
                     increasing the number of spill registers needed but now
                     discover can be safely merged.  */
                  merge_assigned_reloads (insn);

                  /* Generate the insns to reload operands into or out of
                     their reload regs.  */
                  emit_reload_insns (insn);

                  /* Substitute the chosen reload regs from reload_reg_rtx
                     into the insn's body (or perhaps into the bodies of other
                     load and store insns that we just made for reloading
                     and that we moved the structure into).  */

                  /* If this was an ASM, make sure that all the reload insns
                     we have generated are valid.  If not, give an error.  */

                  if (asm_noperands (PATTERN (insn)) >= 0)
                    for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
                      if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
                          && (recog_memoized (p) < 0
                              || (insn_extract (p),
                                  ! constrain_operands (INSN_CODE (p), 1))))
                          error_for_asm (insn,
                                         "`asm' operand requires impossible reload");
                          NOTE_SOURCE_FILE (p) = 0;
                          NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;

          /* Any previously reloaded spilled pseudo reg, stored in this insn,
             is no longer validly lying around to save a future reload.
             Note that this does not detect pseudos that were reloaded
             for this insn in order to be stored in
             (obeying register constraints).  That is correct; such reload
             registers ARE still valid.  */
          note_stores (PATTERN (insn), forget_old_reloads_1);

          /* There may have been CLOBBER insns placed after INSN.  So scan
             between INSN and NEXT and use them to forget old reloads.  */
          for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
            if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
              note_stores (PATTERN (x), forget_old_reloads_1);

          /* Likewise for regs altered by auto-increment in this insn.
             But note that the reg-notes are not changed by reloading:
             they still contain the pseudo-regs, not the spill regs.  */
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
            if (REG_NOTE_KIND (x) == REG_INC)
                /* See if this pseudo reg was reloaded in this insn.
                   If so, its last-reload info is still valid
                   because it is based on this insn's reload.  */
                for (i = 0; i < n_reloads; i++)
                  if (reload_out[i] == XEXP (x, 0))

                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);

      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
        for (i = 0; i < n_spills; i++)
            reg_reloaded_contents[i] = -1;
            reg_reloaded_insn[i] = 0;

      /* Don't assume a reload reg is still good after a call insn
         if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
        for (i = 0; i < n_spills; i++)
          if (call_used_regs[spill_regs[i]])
              reg_reloaded_contents[i] = -1;
              reg_reloaded_insn[i] = 0;

      /* In case registers overlap, allow certain insns to invalidate
         particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0; i < n_spills; i++)
        if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
            reg_reloaded_contents[i] = -1;
            reg_reloaded_insn[i] = 0;
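
/* For each insn handled above, the work is: find_reloads records what must
   be reloaded, choose_reload_regs picks the reload registers (possibly
   inheriting values already present), merge_assigned_reloads combines
   reloads that ended up in the same register, emit_reload_insns emits the
   copies, and note_stores with forget_old_reloads_1 invalidates any recorded
   reload-register contents that the insn overwrites.  */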
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.  */

forget_old_reloads_1 (x, ignored)

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
      offset += SUBREG_WORD (x);

  if (GET_CODE (x) != REG)

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)

      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
         This can happen if a block-local pseudo is allocated to that reg
         and it wasn't spilled because this block's total need is 0.
         Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
        if (spill_reg_order[regno + i] >= 0
            /* But don't do this if the reg actually serves as an output
               reload reg in the current instruction.  */
                || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
            reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
            reg_reloaded_insn[spill_reg_order[regno + i]] = 0;

  /* Since value of X has changed,
     forget any value previously copied from it.  */

    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];

/* Comparison function for qsort to decide which of two reloads
   should be handled first.  *P1 and *P2 are the reload numbers.  */

reload_reg_class_lower (p1, p2)

  register int r1 = *p1, r2 = *p2;

  /* Consider required reloads before optional ones.  */
  t = reload_optional[r1] - reload_optional[r2];

  /* Count all solitary classes before non-solitary ones.  */
  t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
       - (reg_class_size[(int) reload_reg_class[r1]] == 1));

  /* Aside from solitaires, consider all multi-reg groups first.  */
  t = reload_nregs[r2] - reload_nregs[r1];

  /* Consider reloads in order of increasing reg-class number.  */
  t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];

  /* If reloads are equally urgent, sort by reload number,
     so that the results of qsort leave nothing to chance.  */
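
/* So the sort key is, in decreasing priority: required reloads before
   optional ones, reloads whose class contains a single register, then
   larger multi-register groups, then smaller register-class numbers,
   with the reload number as the final tie-breaker.  */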
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
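
/* These sets are consulted by reload_reg_free_p and the related predicates
   below to decide when two reloads of different types may share a hard
   register within one insn.  */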
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are needed.  */

mark_reload_reg_in_use (regno, opnum, type, mode)
     enum reload_type type;
     enum machine_mode mode;

  int nregs = HARD_REGNO_NREGS (regno, mode);

  for (i = regno; i < nregs + regno; i++)
        SET_HARD_REG_BIT (reload_reg_used, i);

      case RELOAD_FOR_INPUT_ADDRESS:
        SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);

      case RELOAD_FOR_OUTPUT_ADDRESS:
        SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);

      case RELOAD_FOR_OPERAND_ADDRESS:
        SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);

      case RELOAD_FOR_OTHER_ADDRESS:
        SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);

      case RELOAD_FOR_INPUT:
        SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);

      case RELOAD_FOR_OUTPUT:
        SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);

      case RELOAD_FOR_INSN:
        SET_HARD_REG_BIT (reload_reg_used_in_insn, i);

      SET_HARD_REG_BIT (reload_reg_used_at_all, i);

/* Similarly, but show REGNO is no longer in use for a reload.  */

clear_reload_reg_in_use (regno, opnum, type, mode)
     enum reload_type type;
     enum machine_mode mode;

  int nregs = HARD_REGNO_NREGS (regno, mode);

  for (i = regno; i < nregs + regno; i++)
        CLEAR_HARD_REG_BIT (reload_reg_used, i);

      case RELOAD_FOR_INPUT_ADDRESS:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);

      case RELOAD_FOR_OUTPUT_ADDRESS:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);

      case RELOAD_FOR_OPERAND_ADDRESS:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);

      case RELOAD_FOR_OTHER_ADDRESS:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);

      case RELOAD_FOR_INPUT:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);

      case RELOAD_FOR_OUTPUT:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);

      case RELOAD_FOR_INSN:
        CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
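
/* Both routines walk the HARD_REGNO_NREGS (REGNO, MODE) consecutive hard
   regs starting at REGNO and update the set selected by TYPE (and, when
   marking, reload_reg_used_at_all as well).  */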
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

reload_reg_free_p (regno, opnum, type)
     enum reload_type type;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))

      /* In use for anything means not available for a RELOAD_OTHER.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);

      /* The other kinds of use can sometimes share a register.  */
    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
         operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
         operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
         outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
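
/* For example, a RELOAD_FOR_INPUT reload for operand 1 may share a register
   with the input-address reloads of operands 0 and 1, but not with another
   input, with a later operand's input address, or with anything used for
   the insn or operand-address reloads.  */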
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

reload_reg_free_before_p (regno, opnum, type)
     enum reload_type type;

    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
         check the reg is not in use for any prior part.  It is tempting
         to try to do this by falling through from objects that occur
         later in the insn to ones that occur earlier, but that will not
         correctly take into account the fact that here we MUST ignore
         things that would prevent the register from being allocated in
         the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
         any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
         RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
         anything that can't be used for it, except that we've already
         tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
         test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
         earlier inputs, other inputs (which we know we don't conflict
         with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
         addresses.  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

reload_reg_reaches_end_p (regno, opnum, type)
     enum reload_type type;

      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
         its value must reach the end.  */

      /* If this use is for part of the insn,
         its value reaches if no subsequent part uses the same register.
         Just like the above function, don't try to do this with lots  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
         with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
         and the address of only subsequent inputs and we do not need
         to check for RELOAD_OTHER objects since they are known not to  */

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
         both input and input address and we do not check for
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these  */

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
         we need only check for output addresses.  */

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
         only things to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.

   This function uses the same algorithm as reload_reg_free_p above.  */

reloads_conflict (r1, r2)

  enum reload_type r1_type = reload_when_needed[r1];
  enum reload_type r2_type = reload_when_needed[r2];
  int r1_opnum = reload_opnum[r1];
  int r2_opnum = reload_opnum[r2];

  /* RELOAD_OTHER conflicts with everything except
     RELOAD_FOR_OTHER_ADDRESS.  */

  if ((r1_type == RELOAD_OTHER && r2_type != RELOAD_FOR_OTHER_ADDRESS)
      || (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS))

  /* Otherwise, check conflicts differently for each type.  */

    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
              || r2_type == RELOAD_FOR_INPUT
              || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
              || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
                  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
              || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;
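
/* For example, by the RELOAD_FOR_INPUT case above, an input reload for
   operand 0 conflicts with any other input, operand-address, or insn
   reload and with the input-address reload of a later operand, but not
   with the input-address reload of operand 0 itself.  */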
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */

static int last_spill_reg = 0;
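
/* Keeping the index of the previous allocation lets the scan in
   allocate_reload_reg below advance round-robin between insns, spreading
   the work over all the spill registers rather than always starting at
   the first one.  */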
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */

allocate_reload_reg (r, insn, last_reload, noerror)

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
      /* I is the index in spill_regs.
         We advance it round-robin between insns to use all spill regs
         equally, so that inherited reloads have a chance
         of leapfrogging each other.  Don't do this, however, when we have
         group needs and failure would be fatal; if we only have a relatively
         small number of spill registers, and more than one of them has
         group needs, then by starting in the middle, we may end up
         allocating the first one in such a way that we are not left with
         sufficient groups to handle the rest.  */

      if (noerror || ! force_group)

      for (count = 0; count < n_spills; count++)
          int class = (int) reload_reg_class[r];

          i = (i + 1) % n_spills;

          if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
                                 reload_when_needed[r])
              && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
              && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
              /* Look first for regs to share, then for unshared.  But
                 don't share regs used for inherited reloads; they are
                 the ones we want to preserve.  */
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,

              int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
                 (on 68000) got us two FP regs.  If NR is 1,
                 we would reject both of them.  */
                nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
              /* If we need only one reg, we have already won.  */
                  /* But reject a single reg if we demand a group.  */
              /* Otherwise check that as many consecutive regs as we need
                 Also, don't use for a group registers that are
                 needed for nongroups.  */
              if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
                    regno = spill_regs[i] + nr - 1;
                    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
                          && spill_reg_order[regno] >= 0
                          && reload_reg_free_p (regno, reload_opnum[r],
                                                reload_when_needed[r])
                          && ! TEST_HARD_REG_BIT (counted_for_nongroups,

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)

  /* We should have found a spill register by now.  */
  if (count == n_spills)

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
      enum machine_mode test_mode = VOIDmode;
        test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
         in whatever mode the reload reg has: to wit, reload_mode[r].
         We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
         to reload from or into have modes which are valid for this
         reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
        if (! (reload_out[r] != 0
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
            /* The reg is OK.  */

            /* Mark as in use for this insn the reload regs we use  */
            mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
                                    reload_when_needed[r], reload_mode[r]);

            reload_reg_rtx[r] = new;
            reload_spill_index[r] = i;

  /* The reg is not OK.  */

      if (asm_noperands (PATTERN (insn)) < 0)
        /* It's the compiler's fault.  */

      /* It's the user's fault; the operand's mode and constraint
         don't match.  Disable this reload so we don't crash in final.  */
      error_for_asm (insn,
                     "`asm' operand constraint incompatible with operand size");
      reload_reg_rtx[r] = 0;
      reload_optional[r] = 1;
      reload_secondary_p[r] = 1;
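
/* Summary of the search above: pass 0 only accepts spill regs already in
   use for some other (non-inherited) reload of this insn, so registers are
   reused where sharing is legal; pass 1 then accepts any free spill reg of
   the right class.  A group request additionally checks that the other
   registers of the group are free, in the class, and not reserved for
   non-group needs.  */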
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

choose_reload_regs (insn, avoid_return_reg)
     rtx avoid_return_reg;

  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;

  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);

#ifdef SMALL_REGISTER_CLASSES
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (avoid_return_reg)
      int regno = REGNO (avoid_return_reg);
        = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));

      for (r = regno; r < regno + nregs; r++)
        if (spill_reg_order[r] >= 0)
          for (j = 0; j < n_reloads; j++)
            if (!reload_optional[j] && reload_reg_rtx[j] == 0
                && (reload_in[j] != 0 || reload_out[j] != 0
                    || reload_secondary_p[j])
                TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))

        avoid_return_reg = 0;
#endif /* SMALL_REGISTER_CLASSES */

#if 0 /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

#ifdef SMALL_REGISTER_CLASSES
    int tem = (avoid_return_reg != 0);

    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
          && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
          && (reload_reg_rtx[j] == 0
              || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
                  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))

#ifdef SMALL_REGISTER_CLASSES
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (avoid_return_reg)
      int regno = REGNO (avoid_return_reg);
        = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));

      for (r = regno; r < regno + nregs; r++)
        if (spill_reg_order[r] >= 0)
          SET_HARD_REG_BIT (reload_reg_used, r);
#endif /* SMALL_REGISTER_CLASSES */

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
      reload_order[j] = j;
      reload_spill_index[j] = -1;

        = (reload_inmode[j] == VOIDmode
           || (GET_MODE_SIZE (reload_outmode[j])
               > GET_MODE_SIZE (reload_inmode[j])))
          ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
          max_group_size = MAX (reload_nregs[j], max_group_size);
          group_class = reg_class_superunion[(int) reload_reg_class[j]][(int) group_class];

      /* If we have already decided to use a certain register,
         don't use it in another way.  */
      if (reload_reg_rtx[j])
        mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
                                reload_when_needed[j], reload_mode[j]);

    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
  bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
         sizeof reload_inheritance_insn);
  bcopy (reload_override_in, save_reload_override_in,
         sizeof reload_override_in);
  bcopy (reload_spill_index, save_reload_spill_index,
         sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
                     reload_reg_used_in_op_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
                     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
                     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
                         reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
                         reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
                         reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
                         reload_reg_used_in_output_addr[i]);

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
      /* Process the reloads in order of preference just found.
         Beyond this point, subregs can be found in reload_reg_rtx.

         This used to look for an existing reloaded home for all
         of the reloads, and only then perform any new reloads.
         But that could lose if the reloads were done out of reg-class order
         because a later reload with a looser constraint might have an old
         home in a register needed by an earlier reload with a tighter constraint.

         To solve this, we make two passes over the reloads, in the order
         described above.  In the first pass we try to inherit a reload
         from a previous insn.  If there is a later reload that needs a
         class that is a proper subset of the class being processed, we must
         also allocate a spill register during the first pass.

         Then make a second pass over the reloads to allocate any reloads
         that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
          register int r = reload_order[j];

          /* Ignore reloads that got marked inoperative.  */
          if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])

          /* If find_reloads chose to use reload_in or reload_out as a reload
             register, we don't need to choose one.  Otherwise, try even if it found
             one since we might save an insn if we find the value lying around.  */
          if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
              && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
                  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))

#if 0 /* No longer needed for correct operation.
         It might give better code, or might not; worth an experiment?  */
          /* If this is an optional reload, we can't inherit from earlier insns
             until we are sure that any non-optional reloads have been allocated.
             The following code takes advantage of the fact that optional reloads
             are at the end of reload_order.  */
          if (reload_optional[r] != 0)
            for (i = 0; i < j; i++)
              if ((reload_out[reload_order[i]] != 0
                   || reload_in[reload_order[i]] != 0
                   || reload_secondary_p[reload_order[i]])
                  && ! reload_optional[reload_order[i]]
                  && reload_reg_rtx[reload_order[i]] == 0)
                allocate_reload_reg (reload_order[i], insn, 0, inheritance);

          /* First see if this pseudo is already available as reloaded
             for a previous insn.  We cannot try to inherit for reloads
             that are smaller than the maximum number of registers needed
             for groups unless the register we would allocate cannot be used

             We could check here to see if this is a secondary reload for
             an object that is already in a register of the desired class.
             This would avoid the need for the secondary reload register.
             But this is complex because we can't easily determine what
             objects might want to be loaded via this reload.  So let a register
             be allocated here.  In `emit_reload_insns' we suppress one of the
             loads in the case described above.  */

              register int regno = -1;
              enum machine_mode mode;

              if (reload_in[r] == 0)

              else if (GET_CODE (reload_in[r]) == REG)
                  regno = REGNO (reload_in[r]);
                  mode = GET_MODE (reload_in[r]);

              else if (GET_CODE (reload_in_reg[r]) == REG)
                  regno = REGNO (reload_in_reg[r]);
                  mode = GET_MODE (reload_in_reg[r]);

              /* This won't work, since REGNO can be a pseudo reg number.
                 Also, it takes much more hair to keep track of all the things
                 that can invalidate an inherited reload of part of a pseudoreg.  */
              else if (GET_CODE (reload_in[r]) == SUBREG
                       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
                regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);

              if (regno >= 0 && reg_last_reload_reg[regno] != 0)
                  i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];

                  if (reg_reloaded_contents[i] == regno
                      && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
                          >= GET_MODE_SIZE (mode))
                      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
                      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
                      && (reload_nregs[r] == max_group_size
                          || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
                      && reload_reg_free_p (spill_regs[i], reload_opnum[r],
                                            reload_when_needed[r])
                      && reload_reg_free_before_p (spill_regs[i],
                                                   reload_when_needed[r]))

                      /* If a group is needed, verify that all the subsequent
                         registers still have their values intact.  */
                        = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);

                      for (k = 1; k < nr; k++)
                        if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]

                          /* We found a register that contains the
                             value we need.  If this register is the
                             same as an `earlyclobber' operand of the
                             current insn, just mark it as a place to
                             reload from since we can't use it as the
                             reload register itself.  */

                          for (i1 = 0; i1 < n_earlyclobbers; i1++)
                            if (reg_overlap_mentioned_for_reload_p
                                (reg_last_reload_reg[regno],
                                 reload_earlyclobbers[i1]))

                          if (i1 != n_earlyclobbers
                              /* Don't really use the inherited spill reg
                                 if we need it wider than we've got it.  */
                              || (GET_MODE_SIZE (reload_mode[r])
                                  > GET_MODE_SIZE (mode)))
                            reload_override_in[r] = reg_last_reload_reg[regno];

                              /* We can use this as a reload reg.  */
                              /* Mark the register as in use for this part of
                                 the insn.  */
                              mark_reload_reg_in_use (spill_regs[i],
                                                      reload_when_needed[r],
                              reload_reg_rtx[r] = reg_last_reload_reg[regno];
                              reload_inherited[r] = 1;
                              reload_inheritance_insn[r]
                                = reg_reloaded_insn[i];
                              reload_spill_index[r] = i;
                              for (k = 0; k < nr; k++)
                                SET_HARD_REG_BIT (reload_reg_used_for_inherit,
          /* Here's another way to see if the value is already lying around.  */
              && reload_in[r] != 0
              && ! reload_inherited[r]
              && reload_out[r] == 0
              && (CONSTANT_P (reload_in[r])
                  || GET_CODE (reload_in[r]) == PLUS
                  || GET_CODE (reload_in[r]) == REG
                  || GET_CODE (reload_in[r]) == MEM)
              && (reload_nregs[r] == max_group_size
                  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))

                = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
                                  -1, NULL_PTR, 0, reload_mode[r]);

              if (GET_CODE (equiv) == REG)
                regno = REGNO (equiv);
              else if (GET_CODE (equiv) == SUBREG)
                  /* This must be a SUBREG of a hard register.
                     Make a new REG since this might be used in an
                     address and not all machines support SUBREGs  */
                  regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
                  equiv = gen_rtx (REG, reload_mode[r], regno);

              /* If we found a spill reg, reject it unless it is free
                 and of the desired class.  */
                  && ((spill_reg_order[regno] >= 0
                       && ! reload_reg_free_before_p (regno, reload_opnum[r],
                                                      reload_when_needed[r]))
                      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],

              if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))

              if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))

              /* We found a register that contains the value we need.
                 If this register is the same as an `earlyclobber' operand
                 of the current insn, just mark it as a place to reload from
                 since we can't use it as the reload register itself.  */

                for (i = 0; i < n_earlyclobbers; i++)
                  if (reg_overlap_mentioned_for_reload_p (equiv,
                                                          reload_earlyclobbers[i]))
                      reload_override_in[r] = equiv;

              /* JRV: If the equiv register we have found is explicitly
                 clobbered in the current insn, mark but don't use, as above.  */

              if (equiv != 0 && regno_clobbered_p (regno, insn))
                  reload_override_in[r] = equiv;

              /* If we found an equivalent reg, say no code need be generated
                 to load it, and use it as our reload reg.  */
              if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
                  reload_reg_rtx[r] = equiv;
                  reload_inherited[r] = 1;
                  /* If it is a spill reg,
                     mark the spill reg as in use for this insn.  */
                  i = spill_reg_order[regno];

                      int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);

                      mark_reload_reg_in_use (regno, reload_opnum[r],
                                              reload_when_needed[r],
                      for (k = 0; k < nr; k++)
                        SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);

          /* If we found a register to use already, or if this is an optional
             reload, we are done.  */
          if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)

#if 0 /* No longer needed for correct operation.  Might or might not
         give better code on the average.  Want to experiment?  */

          /* See if there is a later reload that has a class different from our
             class that intersects our class or that requires less register
             than our reload.  If so, we must allocate a register to this
             reload now, since that reload might inherit a previous reload
             and take the only available register in our class.  Don't do this
             for optional reloads since they will force all previous reloads
             to be allocated.  Also don't do this for reloads that have been  */

          for (i = j + 1; i < n_reloads; i++)
              int s = reload_order[i];

              if ((reload_in[s] == 0 && reload_out[s] == 0
                   && ! reload_secondary_p[s])
                  || reload_optional[s])

              if ((reload_reg_class[s] != reload_reg_class[r]
                   && reg_classes_intersect_p (reload_reg_class[r],
                                               reload_reg_class[s]))
                  || reload_nregs[s] < reload_nregs[r])

          allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);

      /* Now allocate reload registers for anything non-optional that
         didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
          register int r = reload_order[j];

          /* Ignore reloads that got marked inoperative.  */
          if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])

          /* Skip reloads that already have a register allocated or are  */
          if (reload_reg_rtx[r] != 0 || reload_optional[r])

          if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))

      /* If that loop got all the way, we have won.  */

      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
         to allocate with inheritance.  */
      bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
      bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
      bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
             sizeof reload_inheritance_insn);
      bcopy (save_reload_override_in, reload_override_in,
             sizeof reload_override_in);
      bcopy (save_reload_spill_index, reload_spill_index,
             sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
                         save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
                         save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
                         save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
          COPY_HARD_REG_SET (reload_reg_used_in_input[i],
                             save_reload_reg_used_in_input[i]);
          COPY_HARD_REG_SET (reload_reg_used_in_output[i],
                             save_reload_reg_used_in_output[i]);
          COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
                             save_reload_reg_used_in_input_addr[i]);
          COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
                             save_reload_reg_used_in_output_addr[i]);

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
          && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
                                         reload_when_needed[r]))
        reload_inherited[r] = 0;

      /* If we found a better place to reload from,
         validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
          && (GET_CODE (reload_override_in[r]) == REG
              || GET_CODE (reload_override_in[r]) == SUBREG))
          int regno = true_regnum (reload_override_in[r]);
          if (spill_reg_order[regno] >= 0
              && ! reload_reg_free_before_p (regno, reload_opnum[r],
                                             reload_when_needed[r]))
            reload_override_in[r] = 0;

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
        && ((reload_optional[j] && ! reload_inherited[j])
            || (reload_in[j] == 0 && reload_out[j] == 0
                && ! reload_secondary_p[j])))
        int regno = true_regnum (reload_reg_rtx[j]);

        if (spill_reg_order[regno] >= 0)
          clear_reload_reg_in_use (regno, reload_opnum[j],
                                   reload_when_needed[j], reload_mode[j]);
        reload_reg_rtx[j] = 0;

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
         If reload_reg_rtx[r] is 0, this is an optional reload
         that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
          && reload_reg_rtx[r] != 0)
          register int nregno = REGNO (reload_out[r]);

          if (nregno < FIRST_PSEUDO_REGISTER)
            nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

            reg_has_output_reload[nregno + nr] = 1;

              nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
                SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);

          if (reload_when_needed[r] != RELOAD_OTHER
              && reload_when_needed[r] != RELOAD_FOR_OUTPUT
              && reload_when_needed[r] != RELOAD_FOR_INSN)
5365 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5366    reloads of the same item for fear that we might not have enough reload
5367    registers.  However, normally they will get the same reload register
5368    and hence actually need not be loaded twice.
5370    Here we check for the most common case of this phenomenon: when we have
5371    a number of reloads for the same object, each of which were allocated
5372    the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5373    reload, and is not modified in the insn itself.  If we find such,
5374    merge all the reloads and set the resulting reload to RELOAD_OTHER.
5375    This will not increase the number of spill registers needed and will
5376    prevent redundant code.  */
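/* A rough stand-alone illustration of the merge criterion described
   above, over a toy reload table; the struct and every name below are
   inventions for this sketch and are not part of the pass.  A reload
   register may be shared only when every reload that was given it
   just loads the same value, in which case the survivor is promoted
   to be valid for the whole insn.  */
#if 0
#include <string.h>

struct toy_reload
{
  const char *in;		/* value loaded, or 0 if none */
  const char *out;		/* value stored, or 0 if none */
  int regno;			/* assigned reload register, or -1 */
  int is_other;			/* nonzero once valid for the whole insn */
};

static void
toy_merge_assigned_reloads (struct toy_reload *r, int n)
{
  int i, j, mergeable;

  for (i = 0; i < n; i++)
    {
      if (r[i].in == 0 || r[i].out != 0 || r[i].regno < 0 || r[i].is_other)
	continue;

      /* Every other reload using this register must merely load the
	 same value; otherwise nothing can be merged.  */
      mergeable = 1;
      for (j = 0; j < n; j++)
	if (j != i && r[j].regno == r[i].regno
	    && (r[j].out != 0 || r[j].in == 0
		|| strcmp (r[j].in, r[i].in) != 0))
	  mergeable = 0;

      if (! mergeable)
	continue;

      /* Fold the duplicates into reload I and make it valid anywhere
	 in the insn, in the spirit of RELOAD_OTHER.  */
      for (j = 0; j < n; j++)
	if (j != i && r[j].regno == r[i].regno)
	  r[j].regno = -1;
      r[i].is_other = 1;
    }
}
#endif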
5378 #ifdef SMALL_REGISTER_CLASSES
5381 merge_assigned_reloads (insn)
5386 /* Scan all the reloads looking for ones that only load values and
5387    are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5388    assigned and not modified by INSN.  */
5390 for (i = 0; i < n_reloads; i++)
5392     if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5393         || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5394         || reg_set_p (reload_reg_rtx[i], insn))
5397     /* Look at all other reloads.  Ensure that the only use of this
5398        reload_reg_rtx is in a reload that just loads the same value
5399        as we do.  Note that any secondary reloads must be of the identical
5400        class since the values, modes, and result registers are the
5401        same, so we need not do anything with any secondary reloads.  */
5403     for (j = 0; j < n_reloads; j++)
5405         if (i == j || reload_reg_rtx[j] == 0
5406             || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5410         /* If the reload regs aren't exactly the same (e.g., different modes)
5411            or if the values are different, we can't merge anything with this
5414         if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5415             || reload_out[j] != 0 || reload_in[j] == 0
5416             || ! rtx_equal_p (reload_in[i], reload_in[j]))
5420 /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
5421    we, in fact, found any matching reloads.  */
5425 for (j = 0; j < n_reloads; j++)
5426   if (i != j && reload_reg_rtx[j] != 0
5427       && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5429       reload_when_needed[i] = RELOAD_OTHER;
5431       transfer_replacements (i, j);
5434 /* If this is now RELOAD_OTHER, look for any reloads that load
5435    parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5436    if they were for inputs, RELOAD_OTHER for outputs.  Note that
5437    this test is equivalent to looking for reloads for this operand
5440 if (reload_when_needed[i] == RELOAD_OTHER)
5441   for (j = 0; j < n_reloads; j++)
5442     if (reload_in[j] != 0
5443         && reload_when_needed[i] != RELOAD_OTHER
5444         && reg_overlap_mentioned_for_reload_p (reload_in[j],
5446       reload_when_needed[j]
5447         = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5448           ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5452 #endif /* SMALL_REGISTER_CLASSES */
5454 /* Output insns to reload values in and out of the chosen reload regs.  */
5457 emit_reload_insns (insn)
5461 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5462 rtx other_input_address_reload_insns = 0;
5463 rtx other_input_reload_insns = 0;
5464 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5465 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5466 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5467 rtx operand_reload_insns = 0;
5468 rtx following_insn = NEXT_INSN (insn);
5469 rtx before_insn = insn;
5471 /* Values to be put in spill_reg_store are put here first.  */
5472 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5474 for (j = 0; j < reload_n_operands; j++)
5475   input_reload_insns[j] = input_address_reload_insns[j]
5476     = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5478 /* Now output the instructions to copy the data into and out of the
5479    reload registers.  Do these in the order that the reloads were reported,
5480    since reloads of base and index registers precede reloads of operands
5481    and the operands may need the base and index registers reloaded.  */
5483 for (j = 0; j < n_reloads; j++)
5486     rtx oldequiv_reg = 0;
5490     if (old != 0 && ! reload_inherited[j]
5491         && ! rtx_equal_p (reload_reg_rtx[j], old)
5492         && reload_reg_rtx[j] != 0)
5494         register rtx reloadreg = reload_reg_rtx[j];
5496         enum machine_mode mode;
5499 /* Determine the mode to reload in.
5500    This is very tricky because we have three to choose from.
5501    There is the mode the insn operand wants (reload_inmode[J]).
5502    There is the mode of the reload register RELOADREG.
5503    There is the intrinsic mode of the operand, which we could find
5504    by stripping some SUBREGs.
5505    It turns out that RELOADREG's mode is irrelevant:
5506    we can change that arbitrarily.
5508    Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5509    then the reload reg may not support QImode moves, so use SImode.
5510    If foo is in memory due to spilling a pseudo reg, this is safe,
5511    because the QImode value is in the least significant part of a
5512    slot big enough for a SImode.  If foo is some other sort of
5513    memory reference, then it is impossible to reload this case,
5514    so previous passes had better make sure this never happens.
5516    Then consider a one-word union which has SImode and one of its
5517    members is a float, being fetched as (SUBREG:SF union:SI).
5518    We must fetch that as SFmode because we could be loading into
5519    a float-only register.  In this case OLD's mode is correct.
5521    Consider an immediate integer: it has VOIDmode.  Here we need
5522    to get a mode from something else.
5524    In some cases, there is a fourth mode, the operand's
5525    containing mode.  If the insn specifies a containing mode for
5526    this operand, it overrides all others.
5528    I am not sure whether the algorithm here is always right,
5529    but it does the right things in those cases.  */
5531 mode = GET_MODE (old);
5532 if (mode == VOIDmode)
5533   mode = reload_inmode[j];
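/* A minimal stand-alone sketch of the fallback just made: prefer the
   operand's own mode and fall back to the mode the insn asked for
   when the operand is mode-less (VOIDmode, e.g. a constant integer).
   The enum and function below are inventions for this sketch only.  */
#if 0
enum toy_mode { TOY_VOIDmode, TOY_QImode, TOY_SImode };

static enum toy_mode
toy_choose_reload_mode (enum toy_mode operand_mode, enum toy_mode inmode)
{
  /* An immediate integer carries no mode of its own; use the insn's.  */
  if (operand_mode == TOY_VOIDmode)
    return inmode;
  /* Otherwise the operand's intrinsic mode wins; the reload register's
     own mode is irrelevant since the REG can be re-created in any mode.  */
  return operand_mode;
}
#endif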
5535 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5536 /* If we need a secondary register for this operation, see if
5537    the value is already in a register in that class.  Don't
5538    do this if the secondary register will be used as a scratch
5541 if (reload_secondary_in_reload[j] >= 0
5542     && reload_secondary_in_icode[j] == CODE_FOR_nothing
5545   = find_equiv_reg (old, insn,
5546                     reload_reg_class[reload_secondary_in_reload[j]],
5547                     -1, NULL_PTR, 0, mode);
5550 /* If reloading from memory, see if there is a register
5551    that already holds the same value.  If so, reload from there.
5552    We can pass 0 as the reload_reg_p argument because
5553    any other reload has either already been emitted,
5554    in which case find_equiv_reg will see the reload-insn,
5555    or has yet to be emitted, in which case it doesn't matter
5556    because we will use this equiv reg right away.  */
5558 if (oldequiv == 0 && optimize
5559     && (GET_CODE (old) == MEM
5560         || (GET_CODE (old) == REG
5561             && REGNO (old) >= FIRST_PSEUDO_REGISTER
5562             && reg_renumber[REGNO (old)] < 0)))
5563   oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5564                              -1, NULL_PTR, 0, mode);
5568     int regno = true_regnum (oldequiv);
5570     /* If OLDEQUIV is a spill register, don't use it for this
5571        if any other reload needs it at an earlier stage of this insn
5572        or at this stage.  */
5573     if (spill_reg_order[regno] >= 0
5574         && (! reload_reg_free_p (regno, reload_opnum[j],
5575                                  reload_when_needed[j])
5576             || ! reload_reg_free_before_p (regno, reload_opnum[j],
5577                                            reload_when_needed[j])))
5580     /* If OLDEQUIV is not a spill register,
5581        don't use it if any other reload wants it.  */
5582     if (spill_reg_order[regno] < 0)
5585         for (k = 0; k < n_reloads; k++)
5586           if (reload_reg_rtx[k] != 0 && k != j
5587               && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5595     /* If it is no cheaper to copy from OLDEQUIV into the
5596        reload register than it would be to move from memory,
5597        don't use it.  Likewise, if we need a secondary register
5601         && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5602              && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5603                                      reload_reg_class[j])
5604                  >= MEMORY_MOVE_COST (mode)))
5605 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5606             || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5610 #ifdef SECONDARY_MEMORY_NEEDED
5611             || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5612                                         REGNO_REG_CLASS (regno),
5621 else if (GET_CODE (oldequiv) == REG)
5622   oldequiv_reg = oldequiv;
5623 else if (GET_CODE (oldequiv) == SUBREG)
5624   oldequiv_reg = SUBREG_REG (oldequiv);
5626 /* If we are reloading from a register that was recently stored in
5627    with an output-reload, see if we can prove there was
5628    actually no need to store the old value in it.  */
5630 if (optimize && GET_CODE (oldequiv) == REG
5631     && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5632     && spill_reg_order[REGNO (oldequiv)] >= 0
5633     && spill_reg_store[reload_spill_index[REGNO (oldequiv)]] != 0
5634     && find_reg_note (insn, REG_DEAD, reload_in[j])
5635     /* This is unsafe if operand occurs more than once in current
5636        insn.  Perhaps some occurrences weren't reloaded.  */
5637     && count_occurrences (PATTERN (insn), reload_in[j]) == 1
5638     && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0)
5639   delete_output_reload
5640     (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5642 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5643    then load RELOADREG from OLDEQUIV.  Note that we cannot use
5644    gen_lowpart_common since it can do the wrong thing when
5645    RELOADREG has a multi-word mode.  Note that RELOADREG
5646    must always be a REG here.  */
5648 if (GET_MODE (reloadreg) != mode)
5649   reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5650 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5651   oldequiv = SUBREG_REG (oldequiv);
5652 if (GET_MODE (oldequiv) != VOIDmode
5653     && mode != GET_MODE (oldequiv))
5654   oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5656 /* Switch to the right place to emit the reload insns.  */
5657 switch (reload_when_needed[j])
5660     where = &other_input_reload_insns;
5662   case RELOAD_FOR_INPUT:
5663     where = &input_reload_insns[reload_opnum[j]];
5665   case RELOAD_FOR_INPUT_ADDRESS:
5666     where = &input_address_reload_insns[reload_opnum[j]];
5668   case RELOAD_FOR_OUTPUT_ADDRESS:
5669     where = &output_address_reload_insns[reload_opnum[j]];
5671   case RELOAD_FOR_OPERAND_ADDRESS:
5672     where = &operand_reload_insns;
5674   case RELOAD_FOR_OTHER_ADDRESS:
5675     where = &other_input_address_reload_insns;
5681 push_to_sequence (*where);
5684 /* Auto-increment addresses must be reloaded in a special way.  */
5685 if (GET_CODE (oldequiv) == POST_INC
5686     || GET_CODE (oldequiv) == POST_DEC
5687     || GET_CODE (oldequiv) == PRE_INC
5688     || GET_CODE (oldequiv) == PRE_DEC)
5690     /* We are not going to bother supporting the case where an
5691        incremented register can't be copied directly from
5692        OLDEQUIV since this seems highly unlikely.  */
5693     if (reload_secondary_in_reload[j] >= 0)
5695     /* Prevent normal processing of this reload.  */
5697     /* Output a special code sequence for this case.  */
5698     inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5701 /* If we are reloading a pseudo-register that was set by the previous
5702    insn, see if we can get rid of that pseudo-register entirely
5703    by redirecting the previous insn into our reload register.  */
5705 else if (optimize && GET_CODE (old) == REG
5706          && REGNO (old) >= FIRST_PSEUDO_REGISTER
5707          && dead_or_set_p (insn, old)
5708          /* This is unsafe if some other reload
5709             uses the same reg first.  */
5710          && reload_reg_free_before_p (REGNO (reloadreg),
5712                                       reload_when_needed[j]))
5714     rtx temp = PREV_INSN (insn);
5715     while (temp && GET_CODE (temp) == NOTE)
5716       temp = PREV_INSN (temp);
5718         && GET_CODE (temp) == INSN
5719         && GET_CODE (PATTERN (temp)) == SET
5720         && SET_DEST (PATTERN (temp)) == old
5721         /* Make sure we can access insn_operand_constraint.  */
5722         && asm_noperands (PATTERN (temp)) < 0
5723         /* This is unsafe if prev insn rejects our reload reg.  */
5724         && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5726         /* This is unsafe if operand occurs more than once in current
5727            insn.  Perhaps some occurrences aren't reloaded.  */
5728         && count_occurrences (PATTERN (insn), old) == 1
5729         /* Don't risk splitting a matching pair of operands.  */
5730         && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5732         /* Store into the reload register instead of the pseudo.  */
5733         SET_DEST (PATTERN (temp)) = reloadreg;
5734         /* If these are the only uses of the pseudo reg,
5735            pretend for GDB it lives in the reload reg we used.  */
5736         if (reg_n_deaths[REGNO (old)] == 1
5737             && reg_n_sets[REGNO (old)] == 1)
5739             reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5740             alter_reg (REGNO (old), -1);
5746 /* We can't do that, so output an insn to load RELOADREG.  */
5750 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5751 rtx second_reload_reg = 0;
5752 enum insn_code icode;
5754 /* If we have a secondary reload, pick up the secondary register
5755    and icode, if any.  If OLDEQUIV and OLD are different or
5756    if this is an in-out reload, recompute whether or not we
5757    still need a secondary register and what the icode should
5758    be.  If we still need a secondary register and the class or
5759    icode is different, go back to reloading from OLD if using
5760    OLDEQUIV means that we got the wrong type of register.  We
5761    cannot have different class or icode due to an in-out reload
5762    because we don't make such reloads when both the input and
5763    output need secondary reload registers.  */
5765 if (reload_secondary_in_reload[j] >= 0)
5767     int secondary_reload = reload_secondary_in_reload[j];
5768     rtx real_oldequiv = oldequiv;
5771     /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5772        and similarly for OLD.
5773        See comments in get_secondary_reload in reload.c.  */
5774     if (GET_CODE (oldequiv) == REG
5775         && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5776         && reg_equiv_mem[REGNO (oldequiv)] != 0)
5777       real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5779     if (GET_CODE (old) == REG
5780         && REGNO (old) >= FIRST_PSEUDO_REGISTER
5781         && reg_equiv_mem[REGNO (old)] != 0)
5782       real_old = reg_equiv_mem[REGNO (old)];
5784     second_reload_reg = reload_reg_rtx[secondary_reload];
5785     icode = reload_secondary_in_icode[j];
5787     if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5788         || (reload_in[j] != 0 && reload_out[j] != 0))
5790         enum reg_class new_class
5791           = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5792                                           mode, real_oldequiv);
5794         if (new_class == NO_REGS)
5795           second_reload_reg = 0;
5798             enum insn_code new_icode;
5799             enum machine_mode new_mode;
5801             if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5802                                      REGNO (second_reload_reg)))
5803               oldequiv = old, real_oldequiv = real_old;
5806 new_icode = reload_in_optab[(int) mode];
5807 if (new_icode != CODE_FOR_nothing
5808     && ((insn_operand_predicate[(int) new_icode][0]
5809          && ! ((*insn_operand_predicate[(int) new_icode][0])
5811         || (insn_operand_predicate[(int) new_icode][1]
5812             && ! ((*insn_operand_predicate[(int) new_icode][1])
5813                   (real_oldequiv, mode)))))
5814   new_icode = CODE_FOR_nothing;
5816 if (new_icode == CODE_FOR_nothing)
5819   new_mode = insn_operand_mode[(int) new_icode][2];
5821 if (GET_MODE (second_reload_reg) != new_mode)
5823     if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5825       oldequiv = old, real_oldequiv = real_old;
5828       = gen_rtx (REG, new_mode,
5829                  REGNO (second_reload_reg));
5835 /* If we still need a secondary reload register, check
5836    to see if it is being used as a scratch or intermediate
5837    register and generate code appropriately.  If we need
5838    a scratch register, use REAL_OLDEQUIV since the form of
5839    the insn may depend on the actual address if it is
5842 if (second_reload_reg)
5844     if (icode != CODE_FOR_nothing)
5846         emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5847                                     second_reload_reg));
5852         /* See if we need a scratch register to load the
5853            intermediate register (a tertiary reload).  */
5854         enum insn_code tertiary_icode
5855           = reload_secondary_in_icode[secondary_reload];
5857         if (tertiary_icode != CODE_FOR_nothing)
5859             rtx third_reload_reg
5860               = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
5862             emit_insn ((GEN_FCN (tertiary_icode)
5863                         (second_reload_reg, real_oldequiv,
5864                          third_reload_reg)));
5867         gen_input_reload (second_reload_reg, oldequiv,
5869                           reload_when_needed[j]);
5871         oldequiv = second_reload_reg;
5877 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5878   gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5879                     reload_when_needed[j]);
5881 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5882 /* We may have to make a REG_DEAD note for the secondary reload
5883    register in the insns we just made.  Find the last insn that
5884    mentioned the register.  */
5885 if (! special && second_reload_reg
5886     && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5890     for (prev = get_last_insn (); prev;
5891          prev = PREV_INSN (prev))
5892       if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5893           && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5896           REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5905 /* End this sequence.  */
5906 *where = get_insns ();
5910 /* Add a note saying the input reload reg
5911    dies in this insn, if anyone cares.  */
5912 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5914     && reload_reg_rtx[j] != old
5915     && reload_reg_rtx[j] != 0
5916     && reload_out[j] == 0
5917     && ! reload_inherited[j]
5918     && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5920     register rtx reloadreg = reload_reg_rtx[j];
5923     /* We can't abort here because we need to support this for sched.c.
5924        It's not terrible to miss a REG_DEAD note, but we should try
5925        to figure out how to do this correctly.  */
5926     /* The code below is incorrect for address-only reloads.  */
5927     if (reload_when_needed[j] != RELOAD_OTHER
5928         && reload_when_needed[j] != RELOAD_FOR_INPUT)
5932     /* Add a death note to this insn, for an input reload.  */
5934     if ((reload_when_needed[j] == RELOAD_OTHER
5935          || reload_when_needed[j] == RELOAD_FOR_INPUT)
5936         && ! dead_or_set_p (insn, reloadreg))
5938       = gen_rtx (EXPR_LIST, REG_DEAD,
5939                  reloadreg, REG_NOTES (insn));
5942 /* When we inherit a reload, the last marked death of the reload reg
5943    may no longer really be a death.  */
5944 if (reload_reg_rtx[j] != 0
5945     && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5946     && reload_inherited[j])
5948     /* Handle inheriting an output reload.
5949        Remove the death note from the output reload insn.  */
5950     if (reload_spill_index[j] >= 0
5951         && GET_CODE (reload_in[j]) == REG
5952         && spill_reg_store[reload_spill_index[j]] != 0
5953         && find_regno_note (spill_reg_store[reload_spill_index[j]],
5954                             REG_DEAD, REGNO (reload_reg_rtx[j])))
5955       remove_death (REGNO (reload_reg_rtx[j]),
5956                     spill_reg_store[reload_spill_index[j]]);
5957     /* Likewise for input reloads that were inherited.  */
5958     else if (reload_spill_index[j] >= 0
5959              && GET_CODE (reload_in[j]) == REG
5960              && spill_reg_store[reload_spill_index[j]] == 0
5961              && reload_inheritance_insn[j] != 0
5962              && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5963                                  REGNO (reload_reg_rtx[j])))
5964       remove_death (REGNO (reload_reg_rtx[j]),
5965                     reload_inheritance_insn[j]);
5970 /* We got this register from find_equiv_reg.
5971    Search back for its last death note and get rid of it.
5972    But don't search back too far.
5973    Don't go past a place where this reg is set,
5974    since a death note before that remains valid.  */
5975 for (prev = PREV_INSN (insn);
5976      prev && GET_CODE (prev) != CODE_LABEL;
5977      prev = PREV_INSN (prev))
5978   if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5979       && dead_or_set_p (prev, reload_reg_rtx[j]))
5981       if (find_regno_note (prev, REG_DEAD,
5982                            REGNO (reload_reg_rtx[j])))
5983         remove_death (REGNO (reload_reg_rtx[j]), prev);
5989 /* We might have used find_equiv_reg above to choose an alternate
5990    place from which to reload.  If so, and it died, we need to remove
5991    that death and move it to one of the insns we just made.  */
5993 if (oldequiv_reg != 0
5994     && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5998     for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5999          prev = PREV_INSN (prev))
6000       if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6001           && dead_or_set_p (prev, oldequiv_reg))
6003           if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6005               for (prev1 = this_reload_insn;
6006                    prev1; prev1 = PREV_INSN (prev1))
6007                 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6008                     && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6011                     REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6016               remove_death (REGNO (oldequiv_reg), prev);
6023 /* If we are reloading a register that was recently stored in with an
6024    output-reload, see if we can prove there was
6025    actually no need to store the old value in it.  */
6027 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6028     && reload_in[j] != 0
6029     && GET_CODE (reload_in[j]) == REG
6031     /* There doesn't seem to be any reason to restrict this to pseudos
6032        and doing so loses in the case where we are copying from a
6033        register of the wrong class.  */
6034     && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6036     && spill_reg_store[reload_spill_index[j]] != 0
6037     /* This is unsafe if some other reload uses the same reg first.  */
6038     && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6039                                  reload_opnum[j], reload_when_needed[j])
6040     && dead_or_set_p (insn, reload_in[j])
6041     /* This is unsafe if operand occurs more than once in current
6042        insn.  Perhaps some occurrences weren't reloaded.  */
6043     && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6044   delete_output_reload (insn, j,
6045                         spill_reg_store[reload_spill_index[j]]);
6047 /* Input-reloading is done.  Now do output-reloading,
6048    storing the value from the reload-register after the main insn
6049    if reload_out[j] is nonzero.
6051    ??? At some point we need to support handling output reloads of
6052    JUMP_INSNs or insns that set cc0.  */
6053 old = reload_out[j];
6055     && reload_reg_rtx[j] != old
6056     && reload_reg_rtx[j] != 0)
6058     register rtx reloadreg = reload_reg_rtx[j];
6059     register rtx second_reloadreg = 0;
6061     enum machine_mode mode;
6064 /* An output operand that dies right away does need a reload,
6065    but need not be copied from it.  Show the new location in the
6067 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6068     && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6070     XEXP (note, 0) = reload_reg_rtx[j];
6073 else if (GET_CODE (old) == SCRATCH)
6074   /* If we aren't optimizing, there won't be a REG_UNUSED note,
6075      but we don't want to make an output reload.  */
6079 /* Strip off of OLD any size-increasing SUBREGs such as
6080    (SUBREG:SI foo:QI 0).  */
6082 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6083        && (GET_MODE_SIZE (GET_MODE (old))
6084            > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6085   old = SUBREG_REG (old);
6088 /* If this is a JUMP_INSN, we can't support output reloads yet.  */
6089 if (GET_CODE (insn) == JUMP_INSN)
6092 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6094 /* Determine the mode to reload in.
6095    See comments above (for input reloading).  */
6097 mode = GET_MODE (old);
6098 if (mode == VOIDmode)
6100     /* VOIDmode should never happen for an output.  */
6101     if (asm_noperands (PATTERN (insn)) < 0)
6102       /* It's the compiler's fault.  */
6104     error_for_asm (insn, "output operand is constant in `asm'");
6105     /* Prevent crash--use something we know is valid.  */
6107     old = gen_rtx (REG, mode, REGNO (reloadreg));
6110 if (GET_MODE (reloadreg) != mode)
6111   reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6113 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6115 /* If we need two reload regs, set RELOADREG to the intermediate
6116    one, since it will be stored into OUT.  We might need a secondary
6117    register only for an input reload, so check again here.  */
6119 if (reload_secondary_out_reload[j] >= 0)
6123     if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6124         && reg_equiv_mem[REGNO (old)] != 0)
6125       real_old = reg_equiv_mem[REGNO (old)];
6127     if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6131         second_reloadreg = reloadreg;
6132         reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6134         /* See if RELOADREG is to be used as a scratch register
6135            or as an intermediate register.  */
6136         if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6138             emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6139                         (real_old, second_reloadreg, reloadreg)));
6144 /* See if we need both a scratch and intermediate reload
6146     int secondary_reload = reload_secondary_out_reload[j];
6147     enum insn_code tertiary_icode
6148       = reload_secondary_out_icode[secondary_reload];
6151     if (GET_MODE (reloadreg) != mode)
6152       reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6154     if (tertiary_icode != CODE_FOR_nothing)
6157           = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6158         pat = (GEN_FCN (tertiary_icode)
6159                (reloadreg, second_reloadreg, third_reloadreg));
6161 #ifdef SECONDARY_MEMORY_NEEDED
6162     /* If we need a memory location to do the move, do it that way.  */
6163     else if (GET_CODE (reloadreg) == REG
6164              && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6165              && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6166                                          REGNO_REG_CLASS (REGNO (second_reloadreg)),
6167                                          GET_MODE (second_reloadreg)))
6169         /* Get the memory to use and rewrite both registers
6172           = get_secondary_mem (reloadreg,
6173                                GET_MODE (second_reloadreg),
6175                                reload_when_needed[j]);
6178         if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6179           second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6180                                       REGNO (second_reloadreg));
6182         if (GET_MODE (loc) != GET_MODE (reloadreg))
6183           tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6186           tmp_reloadreg = reloadreg;
6188         emit_move_insn (loc, second_reloadreg);
6189         pat = gen_move_insn (tmp_reloadreg, loc);
6193     pat = gen_move_insn (reloadreg, second_reloadreg);
6201 /* Output the last reload insn.  */
6204 #ifdef SECONDARY_MEMORY_NEEDED
6205 /* If we need a memory location to do the move, do it that way.  */
6206 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6207     && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6208                                 REGNO_REG_CLASS (REGNO (reloadreg)),
6209                                 GET_MODE (reloadreg)))
6211     /* Get the memory to use and rewrite both registers to
6213     rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6215                                  reload_when_needed[j]);
6217     if (GET_MODE (loc) != GET_MODE (reloadreg))
6218       reloadreg = gen_rtx (REG, GET_MODE (loc),
6221     if (GET_MODE (loc) != GET_MODE (old))
6222       old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6224     emit_insn (gen_move_insn (loc, reloadreg));
6225     emit_insn (gen_move_insn (old, loc));
6229 emit_insn (gen_move_insn (old, reloadreg));
6232 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6233 /* If final will look at death notes for this reg,
6234    put one on the last output-reload insn to use it.  Similarly
6235    for any secondary register.  */
6236 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6237   for (p = get_last_insn (); p; p = PREV_INSN (p))
6238     if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6239         && reg_overlap_mentioned_for_reload_p (reloadreg,
6241       REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6242                                reloadreg, REG_NOTES (p));
6244 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6246     && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6247   for (p = get_last_insn (); p; p = PREV_INSN (p))
6248     if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6249         && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6251       REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6252                                second_reloadreg, REG_NOTES (p));
6255 /* Look at all insns we emitted, just to be safe.  */
6256 for (p = get_insns (); p; p = NEXT_INSN (p))
6257   if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6259       /* If this output reload doesn't come from a spill reg,
6260          clear any memory of reloaded copies of the pseudo reg.
6261          If this output reload comes from a spill reg,
6262          reg_has_output_reload will make this do nothing.  */
6263       note_stores (PATTERN (p), forget_old_reloads_1);
6265       if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6269 output_reload_insns[reload_opnum[j]] = get_insns ();
6274 if (reload_spill_index[j] >= 0)
6275   new_spill_reg_store[reload_spill_index[j]] = store_insn;
6278 /* Now write all the insns we made for reloads in the order expected by
6279    the allocation functions.  Prior to the insn being reloaded, we write
6280    the following reloads:
6282    RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6284    RELOAD_OTHER reloads.
6286    For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6287    the RELOAD_FOR_INPUT reload for the operand.
6289    RELOAD_FOR_OPERAND_ADDRESS reloads.
6291    After the insn being reloaded, we write the following:
6293    For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6294    the RELOAD_FOR_OUTPUT reload for that operand.  */
6296 emit_insns_before (other_input_address_reload_insns, before_insn);
6297 emit_insns_before (other_input_reload_insns, before_insn);
6299 for (j = 0; j < reload_n_operands; j++)
6301     emit_insns_before (input_address_reload_insns[j], before_insn);
6302     emit_insns_before (input_reload_insns[j], before_insn);
6305 emit_insns_before (operand_reload_insns, before_insn);
6307 for (j = 0; j < reload_n_operands; j++)
6309     emit_insns_before (output_address_reload_insns[j], following_insn);
6310     emit_insns_before (output_reload_insns[j], following_insn);
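/* A stand-alone sketch of the splice order used just above, with the
   sequences modelled as strings; `toy_emit' and every name here are
   inventions for this sketch.  Input-side sequences go before the
   insn, output-side sequences after it, in the fixed order the
   comment above lays out.  */
#if 0
#include <stdio.h>

#define TOY_NOPERANDS 2

static void
toy_emit (const char *seq, const char *where)
{
  if (seq != 0)
    printf ("%s: %s\n", where, seq);
}

static void
toy_emit_reload_order (const char *other_addr, const char *other,
		       const char *in_addr[TOY_NOPERANDS],
		       const char *in[TOY_NOPERANDS],
		       const char *op_addr,
		       const char *out_addr[TOY_NOPERANDS],
		       const char *out[TOY_NOPERANDS])
{
  int j;

  toy_emit (other_addr, "before insn");	/* RELOAD_FOR_OTHER_ADDRESS */
  toy_emit (other, "before insn");	/* RELOAD_OTHER */
  for (j = 0; j < TOY_NOPERANDS; j++)
    {
      toy_emit (in_addr[j], "before insn");	/* RELOAD_FOR_INPUT_ADDRESS */
      toy_emit (in[j], "before insn");		/* RELOAD_FOR_INPUT */
    }
  toy_emit (op_addr, "before insn");	/* RELOAD_FOR_OPERAND_ADDRESS */

  for (j = 0; j < TOY_NOPERANDS; j++)
    {
      toy_emit (out_addr[j], "after insn");	/* RELOAD_FOR_OUTPUT_ADDRESS */
      toy_emit (out[j], "after insn");		/* RELOAD_FOR_OUTPUT */
    }
}
#endif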
6313 /* Move death notes from INSN
6314    to output-operand-address and output reload insns.  */
6315 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6318 /* Loop over those insns, last ones first.  */
6319 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6320      insn1 = PREV_INSN (insn1))
6321   if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6323       rtx source = SET_SRC (PATTERN (insn1));
6324       rtx dest = SET_DEST (PATTERN (insn1));
6326       /* The note we will examine next.  */
6327       rtx reg_notes = REG_NOTES (insn);
6328       /* The place that pointed to this note.  */
6329       rtx *prev_reg_note = &REG_NOTES (insn);
6331       /* If the note is for something used in the source of this
6332          reload insn, or in the output address, move the note.  */
6335           rtx next_reg_notes = XEXP (reg_notes, 1);
6336           if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6337               && GET_CODE (XEXP (reg_notes, 0)) == REG
6338               && ((GET_CODE (dest) != REG
6339                    && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6341                   || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6344               *prev_reg_note = next_reg_notes;
6345               XEXP (reg_notes, 1) = REG_NOTES (insn1);
6346               REG_NOTES (insn1) = reg_notes;
6349             prev_reg_note = &XEXP (reg_notes, 1);
6351           reg_notes = next_reg_notes;
6357 /* For all the spill regs newly reloaded in this instruction,
6358    record what they were reloaded from, so subsequent instructions
6359    can inherit the reloads.
6361    Update spill_reg_store for the reloads of this insn.
6362    Copy the elements that were updated in the loop above.  */
6364 for (j = 0; j < n_reloads; j++)
6366     register int r = reload_order[j];
6367     register int i = reload_spill_index[r];
6369     /* I is nonneg if this reload used one of the spill regs.
6370        If reload_reg_rtx[r] is 0, this is an optional reload
6371        that we opted to ignore.
6373        Also ignore reloads that don't reach the end of the insn,
6374        since we will eventually see the one that does.  */
6376     if (i >= 0 && reload_reg_rtx[r] != 0
6377         && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6378                                      reload_when_needed[r]))
6380         /* First, clear out memory of what used to be in this spill reg.
6381            If consecutive registers are used, clear them all.  */
6383           = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6386         for (k = 0; k < nr; k++)
6388             reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6389             reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6392 /* Maybe the spill reg contains a copy of reload_out.  */
6393 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6395     register int nregno = REGNO (reload_out[r]);
6396     int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6397                : HARD_REGNO_NREGS (nregno,
6398                                    GET_MODE (reload_reg_rtx[r])));
6400     spill_reg_store[i] = new_spill_reg_store[i];
6401     reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6403     /* If NREGNO is a hard register, it may occupy more than
6404        one register.  If it does, say what is in the
6405        rest of the registers assuming that both registers
6406        agree on how many words the object takes.  If not,
6407        invalidate the subsequent registers.  */
6409     if (nregno < FIRST_PSEUDO_REGISTER)
6410       for (k = 1; k < nnr; k++)
6411         reg_last_reload_reg[nregno + k]
6412           = (nr == nnr ? gen_rtx (REG,
6413                                   reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6414                                   REGNO (reload_reg_rtx[r]) + k)
6417     /* Now do the inverse operation.  */
6418     for (k = 0; k < nr; k++)
6420         reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6421           = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6423         reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6427 /* Maybe the spill reg contains a copy of reload_in.  Only do
6428    something if there will not be an output reload for
6429    the register being reloaded.  */
6430 else if (reload_out[r] == 0
6431          && reload_in[r] != 0
6432          && ((GET_CODE (reload_in[r]) == REG
6433               && ! reg_has_output_reload[REGNO (reload_in[r])]
6434               || (GET_CODE (reload_in_reg[r]) == REG
6435                   && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6437     register int nregno;
6440     if (GET_CODE (reload_in[r]) == REG)
6441       nregno = REGNO (reload_in[r]);
6443       nregno = REGNO (reload_in_reg[r]);
6445     nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6446            : HARD_REGNO_NREGS (nregno,
6447                                GET_MODE (reload_reg_rtx[r])));
6449     reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6451     if (nregno < FIRST_PSEUDO_REGISTER)
6452       for (k = 1; k < nnr; k++)
6453         reg_last_reload_reg[nregno + k]
6454           = (nr == nnr ? gen_rtx (REG,
6455                                   reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6456                                   REGNO (reload_reg_rtx[r]) + k)
6459     /* Unless we inherited this reload, show we haven't
6460        recently done a store.  */
6461     if (! reload_inherited[r])
6462       spill_reg_store[i] = 0;
6464     for (k = 0; k < nr; k++)
6466         reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6467           = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6469         reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6475 /* The following if-statement was #if 0'd in 1.34 (or before...).
6476    It's reenabled in 1.35 because supposedly nothing else
6477    deals with this problem.  */
6479 /* If a register gets output-reloaded from a non-spill register,
6480    that invalidates any previous reloaded copy of it.
6481    But forget_old_reloads_1 won't get to see it, because
6482    it thinks only about the original insn.  So invalidate it here.  */
6483 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6485     register int nregno = REGNO (reload_out[r]);
6486     int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6488     while (num_regs-- > 0)
6489       reg_last_reload_reg[nregno + num_regs] = 0;
6494 /* Emit code to perform an input reload of IN to RELOADREG.  IN is from
6495    operand OPNUM with reload type TYPE.
6497    Returns first insn emitted.  */
6500 gen_input_reload (reloadreg, in, opnum, type)
6504      enum reload_type type;
6506   rtx last = get_last_insn ();
6508   /* How to do this reload can get quite tricky.  Normally, we are being
6509      asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6510      register that didn't get a hard register.  In that case we can just
6511      call emit_move_insn.
6513      We can also be asked to reload a PLUS that adds a register or a MEM to
6514      another register, constant or MEM.  This can occur during frame pointer
6515      elimination and while reloading addresses.  This case is handled by
6516      trying to emit a single insn to perform the add.  If it is not valid,
6517      we use a two insn sequence.
6519      Finally, we could be called to handle an 'o' constraint by putting
6520      an address into a register.  In that case, we first try to do this
6521      with a named pattern of "reload_load_address".  If no such pattern
6522      exists, we just emit a SET insn and hope for the best (it will normally
6523      be valid on machines that use 'o').
6525      This entire process is made complex because reload will never
6526      process the insns we generate here and so we must ensure that
6527      they will fit their constraints and also by the fact that parts of
6528      IN might be being reloaded separately and replaced with spill registers.
6529      Because of this, we are, in some sense, just guessing the right approach
6530      here.  The one listed above seems to work.
6532      ??? At some point, this whole thing needs to be rethought.  */
6534   if (GET_CODE (in) == PLUS
6535       && (GET_CODE (XEXP (in, 0)) == REG
6536           || GET_CODE (XEXP (in, 0)) == MEM)
6537       && (GET_CODE (XEXP (in, 1)) == REG
6538           || CONSTANT_P (XEXP (in, 1))
6539           || GET_CODE (XEXP (in, 1)) == MEM))
6541       /* We need to compute the sum of a register or a MEM and another
6542          register, constant, or MEM, and put it into the reload
6543          register.  The best possible way of doing this is if the machine
6544          has a three-operand ADD insn that accepts the required operands.
6546          The simplest approach is to try to generate such an insn and see if it
6547          is recognized and matches its constraints.  If so, it can be used.
6549          It might be better not to actually emit the insn unless it is valid,
6550          but we need to pass the insn as an operand to `recog' and
6551          `insn_extract' and it is simpler to emit and then delete the insn if
6552          not valid than to dummy things up.  */
6554       rtx op0, op1, tem, insn;
6557       op0 = find_replacement (&XEXP (in, 0));
6558       op1 = find_replacement (&XEXP (in, 1));
6560       /* Since constraint checking is strict, commutativity won't be
6561          checked, so we need to do that here to avoid spurious failure
6562          if the add instruction is two-address and the second operand
6563          of the add is the same as the reload reg, which is frequently
6564          the case.  If the insn would be A = B + A, rearrange it so
6565          it will be A = A + B as constrain_operands expects.  */
6567       if (GET_CODE (XEXP (in, 1)) == REG
6568           && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6569         tem = op0, op0 = op1, op1 = tem;
6571       if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6572         in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6574       insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6575       code = recog_memoized (insn);
6579           insn_extract (insn);
6580           /* We want constrain operands to treat this insn strictly in
6581              its validity determination, i.e., the way it would after reload
6583           if (constrain_operands (code, 1))
6587       delete_insns_since (last);
6589       /* If that failed, we must use a conservative two-insn sequence.
6590          Use move to copy constant, MEM, or pseudo register to the reload
6591          register since "move" will be able to handle an arbitrary operand,
6592          unlike add which can't, in general.  Then add the registers.
6594          If there is another way to do this for a specific machine, a
6595          DEFINE_PEEPHOLE should be specified that recognizes the sequence
6598       if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6599           || (GET_CODE (op1) == REG
6600               && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6601         tem = op0, op0 = op1, op1 = tem;
6603       emit_insn (gen_move_insn (reloadreg, op0));
6605       /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6606          This fixes a problem on the 32K where the stack pointer cannot
6607          be used as an operand of an add insn.  */
6609       if (rtx_equal_p (op0, op1))
6612       insn = emit_insn (gen_add2_insn (reloadreg, op1));
6614       /* If that failed, copy the address register to the reload register.
6615          Then add the constant to the reload register.  */
6617       code = recog_memoized (insn);
6621           insn_extract (insn);
6622           /* We want constrain operands to treat this insn strictly in
6623              its validity determination, i.e., the way it would after reload
6625           if (constrain_operands (code, 1))
6629       delete_insns_since (last);
6631       emit_insn (gen_move_insn (reloadreg, op1));
6632       emit_insn (gen_add2_insn (reloadreg, op0));
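/* A stand-alone sketch of the strategy used above for reloading a
   PLUS: optimistically emit the three-operand add and, if the target
   does not recognize and constrain it, fall back to a move followed
   by a two-operand add.  Every function below is a stub invented for
   this sketch; the real code goes through emit_insn, recog_memoized,
   constrain_operands and delete_insns_since.  */
#if 0
/* Stub: pretend the machine has no three-operand add.  */
static int
toy_try_three_operand_add (int dst, int a, int b)
{
  (void) dst; (void) a; (void) b;
  return 0;
}

static void toy_emit_move (int dst, int src) { (void) dst; (void) src; }
static void toy_emit_add2 (int dst, int src) { (void) dst; (void) src; }

static void
toy_reload_plus (int reloadreg, int op0, int op1)
{
  if (toy_try_three_operand_add (reloadreg, op0, op1))
    return;			/* the single insn was valid; done */

  /* Conservative two-insn sequence: a plain move can handle an
     arbitrary operand, then add the other operand into the reload reg.  */
  toy_emit_move (reloadreg, op0);
  toy_emit_add2 (reloadreg, op1);
}
#endif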
6635 #ifdef SECONDARY_MEMORY_NEEDED
6636   /* If we need a memory location to do the move, do it that way.  */
6637   else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6638            && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6639                                        REGNO_REG_CLASS (REGNO (reloadreg)),
6640                                        GET_MODE (reloadreg)))
6642       /* Get the memory to use and rewrite both registers to its mode.  */
6643       rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6645       if (GET_MODE (loc) != GET_MODE (reloadreg))
6646         reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6648       if (GET_MODE (loc) != GET_MODE (in))
6649         in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6651       emit_insn (gen_move_insn (loc, in));
6652       emit_insn (gen_move_insn (reloadreg, loc));
6656   /* If IN is a simple operand, use gen_move_insn.  */
6657   else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6658     emit_insn (gen_move_insn (reloadreg, in));
6660 #ifdef HAVE_reload_load_address
6661   else if (HAVE_reload_load_address)
6662     emit_insn (gen_reload_load_address (reloadreg, in));
6665   /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
6667     emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6669   /* Return the first insn emitted.
6670      We can not just return get_last_insn, because there may have
6671      been multiple instructions emitted.  Also note that gen_move_insn may
6672      emit more than one insn itself, so we can not assume that there is one
6673      insn emitted per emit_insn_before call.  */
6675   return last ? NEXT_INSN (last) : get_insns ();
6678 /* Delete a previously made output-reload
6679    whose result we now believe is not needed.
6680    First we double-check.
6682    INSN is the insn now being processed.
6683    OUTPUT_RELOAD_INSN is the insn of the output reload.
6684    J is the reload-number for this insn.  */
6687 delete_output_reload (insn, j, output_reload_insn)
6690      rtx output_reload_insn;
6694   /* Get the raw pseudo-register referred to.  */
6696   rtx reg = reload_in[j];
6697   while (GET_CODE (reg) == SUBREG)
6698     reg = SUBREG_REG (reg);
6700   /* If the pseudo-reg we are reloading is no longer referenced
6701      anywhere between the store into it and here,
6702      and no jumps or labels intervene, then the value can get
6703      here through the reload reg alone.
6704      Otherwise, give up--return.  */
6705   for (i1 = NEXT_INSN (output_reload_insn);
6706        i1 != insn; i1 = NEXT_INSN (i1))
6708       if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6710       if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6711           && reg_mentioned_p (reg, PATTERN (i1)))
6715   if (cannot_omit_stores[REGNO (reg)])
6718   /* If this insn will store in the pseudo again,
6719      the previous store can be removed.  */
6720   if (reload_out[j] == reload_in[j])
6721     delete_insn (output_reload_insn);
6723   /* See if the pseudo reg has been completely replaced
6724      with reload regs.  If so, delete the store insn
6725      and forget we had a stack slot for the pseudo.  */
6726   else if (reg_n_deaths[REGNO (reg)] == 1
6727            && reg_basic_block[REGNO (reg)] >= 0
6728            && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6732       /* We know that it was used only between here
6733          and the beginning of the current basic block.
6734          (We also know that the last use before INSN was
6735          the output reload we are thinking of deleting, but never mind that.)
6736          Search that range; see if any ref remains.  */
6737       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6739           rtx set = single_set (i2);
6741           /* Uses which just store in the pseudo don't count,
6742              since if they are the only uses, they are dead.  */
6743           if (set != 0 && SET_DEST (set) == reg)
6745           if (GET_CODE (i2) == CODE_LABEL
6746               || GET_CODE (i2) == JUMP_INSN)
6748           if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6749               && reg_mentioned_p (reg, PATTERN (i2)))
6750             /* Some other ref remains;
6751                we can't do anything.  */
6755       /* Delete the now-dead stores into this pseudo.  */
6756       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6758           rtx set = single_set (i2);
6760           if (set != 0 && SET_DEST (set) == reg)
6762           if (GET_CODE (i2) == CODE_LABEL
6763               || GET_CODE (i2) == JUMP_INSN)
6767       /* For the debugging info,
6768          say the pseudo lives in this reload reg.  */
6769       reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6770       alter_reg (REGNO (reg), -1);
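/* A stand-alone sketch of the safety scan performed above, over a toy
   list of "insns": the earlier store may be deleted only if nothing
   between the store and the present insn mentions the register and no
   label or jump intervenes.  The struct and names are inventions for
   this sketch.  */
#if 0
struct toy_insn
{
  struct toy_insn *next;
  int is_label_or_jump;
  int mentions_reg;		/* nonzero if this insn references the pseudo */
};

static int
toy_store_is_redundant_p (struct toy_insn *store, struct toy_insn *use)
{
  struct toy_insn *p;

  for (p = store->next; p != 0 && p != use; p = p->next)
    {
      if (p->is_label_or_jump)
	return 0;		/* control flow may intervene: keep the store */
      if (p->mentions_reg)
	return 0;		/* the pseudo is still read in between */
    }
  /* Reached the use with no interference: the value can flow through
     the reload register alone.  */
  return p == use;
}
#endif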
6774 /* Output reload-insns to reload VALUE into RELOADREG.
6775    VALUE is an autoincrement or autodecrement RTX whose operand
6776    is a register or memory location;
6777    so reloading involves incrementing that location.
6779    INC_AMOUNT is the number to increment or decrement by (always positive).
6780    This cannot be deduced from VALUE.  */
6783 inc_for_reload (reloadreg, value, inc_amount)
6788   /* REG or MEM to be copied and incremented.  */
6789   rtx incloc = XEXP (value, 0);
6790   /* Nonzero if increment after copying.  */
6791   int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6797   /* No hard register is equivalent to this register after
6798      inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
6799      we could inc/dec that register as well (maybe even using it for
6800      the source), but I'm not sure it's worth worrying about.  */
6801   if (GET_CODE (incloc) == REG)
6802     reg_last_reload_reg[REGNO (incloc)] = 0;
6804   if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6805     inc_amount = - inc_amount;
6807   inc = GEN_INT (inc_amount);
6809   /* If this is post-increment, first copy the location to the reload reg.  */
6811     emit_insn (gen_move_insn (reloadreg, incloc));
6813   /* See if we can directly increment INCLOC.  Use a method similar to that
6814      in gen_input_reload.  */
6816   last = get_last_insn ();
6817   add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6818                                  gen_rtx (PLUS, GET_MODE (incloc),
6821   code = recog_memoized (add_insn);
6824       insn_extract (add_insn);
6825       if (constrain_operands (code, 1))
6827           /* If this is a pre-increment and we have incremented the value
6828              where it lives, copy the incremented value to RELOADREG to
6829              be used as an address.  */
6832             emit_insn (gen_move_insn (reloadreg, incloc));
6838       delete_insns_since (last);
6840   /* If we couldn't do the increment directly, we must increment in RELOADREG.
6841      The way we do this depends on whether this is pre- or post-increment.
6842      For pre-increment, copy INCLOC to the reload register, increment it
6843      there, then save back.  */
6847       emit_insn (gen_move_insn (reloadreg, incloc));
6848       emit_insn (gen_add2_insn (reloadreg, inc));
6849       emit_insn (gen_move_insn (incloc, reloadreg));
6854      Because this might be a jump insn or a compare, and because RELOADREG
6855      may not be available after the insn in an input reload, we must do
6856      the incrementation before the insn being reloaded for.
6858      We have already copied INCLOC to RELOADREG.  Increment the copy in
6859      RELOADREG, save that back, then decrement RELOADREG so it has
6860      the original value.  */
6862       emit_insn (gen_add2_insn (reloadreg, inc));
6863       emit_insn (gen_move_insn (incloc, reloadreg));
6864       emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
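/* A stand-alone sketch of the fallback above, modelled with plain
   ints (the names are inventions for this sketch).  When the location
   cannot be incremented in place, the increment is done in the reload
   register: for a pre-increment the new value is what the insn needs;
   for a post-increment the insn needs the old value, so we add, store
   the sum back, then subtract to recover the original.  */
#if 0
static void
toy_inc_for_reload (int *incloc, int *reloadreg, int inc_amount, int post)
{
  *reloadreg = *incloc;		/* copy the location into the reload reg */

  if (post)
    {
      *reloadreg += inc_amount;
      *incloc = *reloadreg;	/* the location ends up incremented */
      *reloadreg -= inc_amount;	/* the insn still sees the old value */
    }
  else
    {
      *reloadreg += inc_amount;	/* pre-increment: the insn sees the new value */
      *incloc = *reloadreg;
    }
}
#endif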
6870 /* Return 1 if we are certain that the constraint-string STRING allows
6871    the hard register REG.  Return 0 if we can't be sure of this.  */
6874 constraint_accepts_reg_p (string, reg)
6879   int regno = true_regnum (reg);
6882   /* Initialize for first alternative.  */
6884   /* Check that each alternative contains `g' or `r'.  */
6886     switch (c = *string++)
6889         /* If an alternative lacks `g' or `r', we lose.  */
6892         /* If an alternative lacks `g' or `r', we lose.  */
6895         /* Initialize for next alternative.  */
6900         /* Any general reg wins for this alternative.  */
6901         if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6905         /* Any reg in specified class wins for this alternative.  */
6907           enum reg_class class = REG_CLASS_FROM_LETTER (c);
6909           if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
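/* A stand-alone sketch of the test above: walk a constraint string
   alternative by alternative (separated by ',') and report certainty
   only if every alternative allows a general register.  Real
   constraint letters are richer; only `g' and `r' are modelled, and
   the function name is an invention for this sketch.  */
#if 0
static int
toy_constraint_accepts_general_reg_p (const char *string)
{
  int this_alternative_wins = 0;
  char c;

  while ((c = *string++) != '\0')
    switch (c)
      {
      case ',':			/* end of one alternative */
	if (! this_alternative_wins)
	  return 0;
	this_alternative_wins = 0;
	break;
      case 'g':
      case 'r':			/* any general reg satisfies this alternative */
	this_alternative_wins = 1;
	break;
      default:
	break;
      }
  return this_alternative_wins;
}
#endif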
6915 /* Return the number of places FIND appears within X, but don't count
6916    an occurrence if some SET_DEST is FIND.  */
6919 count_occurrences (x, find)
6920      register rtx x, find;
6923   register enum rtx_code code;
6924   register char *format_ptr;
6932   code = GET_CODE (x);
6947       if (SET_DEST (x) == find)
6948         return count_occurrences (SET_SRC (x), find);
6952   format_ptr = GET_RTX_FORMAT (code);
6955   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6957       switch (*format_ptr++)
6960           count += count_occurrences (XEXP (x, i), find);
6964           if (XVEC (x, i) != NULL)
6966             for (j = 0; j < XVECLEN (x, i); j++)
6967               count += count_occurrences (XVECEXP (x, i, j), find);