1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option) any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
31 #include "hard-reg-set.h"
34 #include "basic-block.h"
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns that need them.
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
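/* Illustrative sketch (added commentary, not part of the original sources):
   the iterate-until-stable flow described above, using hypothetical helper
   names; the real driver is reload () below, and the final substitution is
   done by reload_as_needed ().  */
#if 0
  while (something_changed)
    {
      /* Let find_reloads report what each insn needs, per register class.  */
      for (insn = first; insn; insn = NEXT_INSN (insn))
	record_reload_needs (insn);		/* hypothetical helper */

      /* Spill enough hard regs of each class to cover the largest need seen;
	 displaced pseudos may create new needs, so repeat until stable.  */
      something_changed = spill_for_needs ();	/* hypothetical helper */
    }
  reload_as_needed (first, global);
#endif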
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
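/* Added note: with these fallback values a register-register copy is assumed
   to cost 2 and a memory reference 4, so, other things being equal, the pass
   prefers reloading from another hard register over reloading from a stack
   slot.  A target normally overrides both macros in its machine description
   header, e.g. (hypothetical values):

     #define REGISTER_MOVE_COST(CLASS1, CLASS2) 2
     #define MEMORY_MOVE_COST(MODE) 6
*/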
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet.  This rtx is reused, provided it has the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162 (spill_reg_order prevents these registers from being used to start a group.) */
164 static HARD_REG_SET bad_spill_regs;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
171 /* Index of last register assigned as a spill register. We allocate in
172 a round-robin fashion. */
174 static int last_spill_reg;
176 /* Describes order of preference for putting regs into spill_regs.
177 Contains the numbers of all the hard regs, in order most preferred first.
178 This order is different for each function.
179 It is set up by order_regs_for_reload.
180 Empty elements at the end contain -1. */
181 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
183 /* 1 for a hard register that appears explicitly in the rtl
184 (for example, function value registers, special registers
185 used by insns, structure value pointer registers). */
186 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
188 /* Indicates if a register was counted against the need for
189 groups. 0 means it can count against max_nongroup instead. */
190 static HARD_REG_SET counted_for_groups;
192 /* Indicates if a register was counted against the need for
193 non-groups. 0 means it can become part of a new group.
194 During choose_reload_regs, 1 here means don't use this reg
195 as part of a group, even if it seems to be otherwise ok. */
196 static HARD_REG_SET counted_for_nongroups;
198 /* Indexed by pseudo reg number N,
199 says may not delete stores into the real (memory) home of pseudo N.
200 This is set if we already substituted a memory equivalent in some uses,
201 which happens when we have to eliminate the fp from it. */
202 static char *cannot_omit_stores;
204 /* Nonzero if indirect addressing is supported on the machine; this means
205 that spilling (REG n) does not require reloading it into a register in
206 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
207 value indicates the level of indirect addressing supported, e.g., two
208 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get a hard register. */
211 static char spill_indirect_levels;
213 /* Nonzero if indirect addressing is supported when the innermost MEM is
214 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
215 which these are valid is the same as spill_indirect_levels, above. */
217 char indirect_symref_ok;
219 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
221 char double_reg_address_ok;
223 /* Record the stack slot for each spilled hard register. */
225 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
227 /* Width allocated so far for that stack slot. */
229 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
231 /* Indexed by register class and basic block number, nonzero if there is
232 any need for a spill register of that class in that basic block.
233 The pointer is 0 if we did stupid allocation and don't know
234 the structure of basic blocks. */
236 char *basic_block_needs[N_REG_CLASSES];
238 /* First uid used by insns created by reload in this function.
239 Used in find_equiv_reg. */
240 int reload_first_uid;
242 /* Flag set by local-alloc or global-alloc if anything is live in
243 a call-clobbered reg across calls. */
245 int caller_save_needed;
247 /* Set to 1 while reload_as_needed is operating.
248 Required by some machines to handle any generated moves differently. */
250 int reload_in_progress = 0;
252 /* These arrays record the insn_code of insns that may be needed to
253 perform input and output reloads of special objects. They provide a
254 place to pass a scratch register. */
256 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
257 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
259 /* This obstack is used for allocation of rtl during register elimination.
260 The allocated storage can be freed once find_reloads has processed the insn. */
263 struct obstack reload_obstack;
264 char *reload_firstobj;
266 #define obstack_chunk_alloc xmalloc
267 #define obstack_chunk_free free
269 /* List of labels that must never be deleted. */
270 extern rtx forced_labels;
272 /* This structure is used to record information about register eliminations.
273 Each array entry describes one possible way of eliminating a register
274 in favor of another. If there is more than one way of eliminating a
275 particular register, the most preferred should be specified first. */
277 static struct elim_table
278 {
279 int from;			/* Register number to be eliminated. */
280 int to;			/* Register number used as replacement. */
281 int initial_offset;		/* Initial difference between values. */
282 int can_eliminate;		/* Non-zero if this elimination can be done. */
283 int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
284 				   insns made by reload. */
285 int offset;			/* Current offset between the two regs. */
286 int max_offset;		/* Maximum offset between the two regs. */
287 int previous_offset;		/* Offset at end of previous insn. */
288 int ref_outside_mem;		/* "to" has been referenced outside a MEM. */
289 rtx from_rtx;			/* REG rtx for the register to be eliminated.
290 				   We cannot simply compare the number since
291 				   we might then spuriously replace a hard
292 				   register corresponding to a pseudo
293 				   assigned to the reg to be eliminated. */
294 rtx to_rtx;			/* REG rtx for the replacement. */
295 } reg_eliminate[] =
297 /* If a set of eliminable registers was specified, define the table from it.
298    Otherwise, default to the normal case of the frame pointer being
299    replaced by the stack pointer. */
301 #ifdef ELIMINABLE_REGS
302 ELIMINABLE_REGS;
303 #else
304 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
305 #endif
307 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
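/* Added illustration (hypothetical, not taken from any particular target):
   a port that can replace both the argument pointer and the frame pointer
   would define something like

     #define ELIMINABLE_REGS					\
      {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },		\
       { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },		\
       { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   listing, for each register, the most preferred replacement first, as the
   comment above requires.  */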
309 /* Record the number of pending eliminations that have an offset not equal
310 to their initial offset. If non-zero, we use a new copy of each
311 replacement result in any insns encountered. */
312 static int num_not_at_initial_offset;
314 /* Count the number of registers that we may be able to eliminate. */
315 static int num_eliminable;
317 /* For each label, we record the offset of each elimination. If we reach
318 a label by more than one path and an offset differs, we cannot do the
319 elimination. This information is indexed by the number of the label.
320 The first table is an array of flags that records whether we have yet
321 encountered a label and the second table is an array of arrays, one
322 entry in the latter array for each elimination. */
324 static char *offsets_known_at;
325 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
327 /* Number of labels in the current function. */
329 static int num_labels;
331 struct hard_reg_n_uses { int regno; int uses; };
333 static int possible_group_p		PROTO((int, int *));
334 static void count_possible_groups	PROTO((int *, enum machine_mode *,
336 static int modes_equiv_for_class_p	PROTO((enum machine_mode,
339 static void spill_failure		PROTO((rtx));
340 static int new_spill_reg		PROTO((int, int, int *, int *, int,
342 static void delete_dead_insn		PROTO((rtx));
343 static void alter_reg			PROTO((int, int));
344 static void mark_scratch_live		PROTO((rtx));
345 static void set_label_offsets		PROTO((rtx, rtx, int));
346 static int eliminate_regs_in_insn	PROTO((rtx, int));
347 static void mark_not_eliminable		PROTO((rtx, rtx));
348 static int spill_hard_reg		PROTO((int, int, FILE *, int));
349 static void scan_paradoxical_subregs	PROTO((rtx));
350 static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
351 					       struct hard_reg_n_uses *));
352 static void order_regs_for_reload	PROTO((void));
353 static int compare_spill_regs		PROTO((short *, short *));
354 static void reload_as_needed		PROTO((rtx, int));
355 static void forget_old_reloads_1	PROTO((rtx, rtx));
356 static int reload_reg_class_lower	PROTO((short *, short *));
357 static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
359 static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
361 static int reload_reg_free_p		PROTO((int, int, enum reload_type));
362 static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
363 static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
364 static int reloads_conflict		PROTO((int, int));
365 static int allocate_reload_reg		PROTO((int, rtx, int, int));
366 static void choose_reload_regs		PROTO((rtx, rtx));
367 static void merge_assigned_reloads	PROTO((rtx));
368 static void emit_reload_insns		PROTO((rtx));
369 static void delete_output_reload	PROTO((rtx, int, rtx));
370 static void inc_for_reload		PROTO((rtx, rtx, int));
371 static int constraint_accepts_reg_p	PROTO((char *, rtx));
372 static int count_occurrences		PROTO((rtx, rtx));
374 /* Initialize the reload pass once per compilation. */
381 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
382 Set spill_indirect_levels to the number of levels such addressing is
383 permitted, zero if it is not permitted at all. */
385 register rtx tem
386 = gen_rtx (MEM, Pmode,
387 gen_rtx (PLUS, Pmode,
388 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
390 spill_indirect_levels = 0;
392 while (memory_address_p (QImode, tem))
394 spill_indirect_levels++;
395 tem = gen_rtx (MEM, Pmode, tem);
398 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
400 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
401 indirect_symref_ok = memory_address_p (QImode, tem);
403 /* See if reg+reg is a valid (and offsettable) address. */
405 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
407 tem = gen_rtx (PLUS, Pmode,
408 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
409 gen_rtx (REG, Pmode, i));
410 /* This way, we make sure that reg+reg is an offsettable address. */
411 tem = plus_constant (tem, 4);
413 if (memory_address_p (QImode, tem))
415 double_reg_address_ok = 1;
420 /* Initialize obstack for our rtl allocation. */
421 gcc_obstack_init (&reload_obstack);
422 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
425 /* Main entry point for the reload pass.
427 FIRST is the first insn of the function being compiled.
429 GLOBAL nonzero means we were called from global_alloc
430 and should attempt to reallocate any pseudoregs that we
431 displace from hard regs we will use for reloads.
432 If GLOBAL is zero, we do not have enough information to do that,
433 so any pseudo reg that is spilled must go to the stack.
435 DUMPFILE is the global-reg debugging dump file stream, or 0.
436 If it is nonzero, messages are written to it to describe
437 which registers are seized as reload regs, which pseudo regs
438 are spilled from them, and where the pseudo regs are reallocated to.
440 Return value is nonzero if reload failed
441 and we must not do any more for this function. */
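/* Added usage note (hedged sketch; names other than reload and get_insns
   are placeholders): the compilation driver invokes this pass roughly as

     failure = reload (get_insns (), run_global_alloc, dump_file);

   and gives up on the function when the return value is nonzero.  */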
444 reload (first, global, dumpfile)
450 register int i, j, k;
452 register struct elim_table *ep;
454 int something_changed;
455 int something_needs_reloads;
456 int something_needs_elimination;
457 int new_basic_block_needs;
458 enum reg_class caller_save_spill_class = NO_REGS;
459 int caller_save_group_size = 1;
461 /* Nonzero means we couldn't get enough spill regs. */
464 /* The basic block number currently being processed for INSN. */
465 int this_block;
467 /* Make sure even insns with volatile mem refs are recognizable. */
468 init_recog ();
470 /* Enable find_equiv_reg to distinguish insns made by reload. */
471 reload_first_uid = get_max_uid ();
473 for (i = 0; i < N_REG_CLASSES; i++)
474 basic_block_needs[i] = 0;
476 #ifdef SECONDARY_MEMORY_NEEDED
477 /* Initialize the secondary memory table. */
478 clear_secondary_mem ();
481 /* Remember which hard regs appear explicitly
482 before we merge into `regs_ever_live' the ones in which
483 pseudo regs have been allocated. */
484 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
486 /* We don't have a stack slot for any spill reg yet. */
487 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
488 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
490 /* Initialize the save area information for caller-save, in case some
494 /* Compute which hard registers are now in use
495 as homes for pseudo registers.
496 This is done here rather than (eg) in global_alloc
497 because this point is reached even if not optimizing. */
499 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
502 for (i = 0; i < scratch_list_length; i++)
504 mark_scratch_live (scratch_list[i]);
506 /* Make sure that the last insn in the chain
507 is not something that needs reloading. */
508 emit_note (NULL_PTR, NOTE_INSN_DELETED);
510 /* Find all the pseudo registers that didn't get hard regs
511 but do have known equivalent constants or memory slots.
512 These include parameters (known equivalent to parameter slots)
513 and cse'd or loop-moved constant memory addresses.
515 Record constant equivalents in reg_equiv_constant
516 so they will be substituted by find_reloads.
517 Record memory equivalents in reg_mem_equiv so they can
518 be substituted eventually by altering the REG-rtx's. */
520 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
521 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
522 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
523 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
524 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
525 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
526 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
527 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
528 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
529 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
530 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
531 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
532 cannot_omit_stores = (char *) alloca (max_regno);
533 bzero (cannot_omit_stores, max_regno);
535 #ifdef SMALL_REGISTER_CLASSES
536 CLEAR_HARD_REG_SET (forbidden_regs);
539 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
540 Also find all paradoxical subregs and find largest such for each pseudo.
541 On machines with small register classes, record hard registers that
542 are used for user variables. These can never be used for spills. */
544 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
546 rtx set
= single_set (insn
);
548 if (set
!= 0 && GET_CODE (SET_DEST (set
)) == REG
)
550 rtx note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
);
552 #ifdef LEGITIMATE_PIC_OPERAND_P
553 && (! CONSTANT_P (XEXP (note
, 0)) || ! flag_pic
554 || LEGITIMATE_PIC_OPERAND_P (XEXP (note
, 0)))
558 rtx x
= XEXP (note
, 0);
559 i
= REGNO (SET_DEST (set
));
560 if (i
> LAST_VIRTUAL_REGISTER
)
562 if (GET_CODE (x
) == MEM
)
563 reg_equiv_memory_loc
[i
] = x
;
564 else if (CONSTANT_P (x
))
566 if (LEGITIMATE_CONSTANT_P (x
))
567 reg_equiv_constant
[i
] = x
;
569 reg_equiv_memory_loc
[i
]
570 = force_const_mem (GET_MODE (SET_DEST (set
)), x
);
575 /* If this register is being made equivalent to a MEM
576 and the MEM is not SET_SRC, the equivalencing insn
577 is one with the MEM as a SET_DEST and it occurs later.
578 So don't mark this insn now. */
579 if (GET_CODE (x
) != MEM
580 || rtx_equal_p (SET_SRC (set
), x
))
581 reg_equiv_init
[i
] = insn
;
586 /* If this insn is setting a MEM from a register equivalent to it,
587 this is the equivalencing insn. */
588 else if (set
&& GET_CODE (SET_DEST (set
)) == MEM
589 && GET_CODE (SET_SRC (set
)) == REG
590 && reg_equiv_memory_loc
[REGNO (SET_SRC (set
))]
591 && rtx_equal_p (SET_DEST (set
),
592 reg_equiv_memory_loc
[REGNO (SET_SRC (set
))]))
593 reg_equiv_init
[REGNO (SET_SRC (set
))] = insn
;
595 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
596 scan_paradoxical_subregs (PATTERN (insn
));
599 /* Does this function require a frame pointer? */
601 frame_pointer_needed = (! flag_omit_frame_pointer
602 #ifdef EXIT_IGNORE_STACK
603 /* ?? If EXIT_IGNORE_STACK is set, we will not save
604 and restore sp for alloca.  So we can't eliminate
605 the frame pointer in that case.  At some point,
606 we should improve this by emitting the
607 sp-adjusting insns for this case. */
608 || (current_function_calls_alloca
609 && EXIT_IGNORE_STACK)
610 #endif
611 || FRAME_POINTER_REQUIRED);
615 /* Initialize the table of registers to eliminate. The way we do this
616 depends on how the eliminable registers were defined. */
617 #ifdef ELIMINABLE_REGS
618 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
620 ep->can_eliminate = ep->can_eliminate_previous
621 = (CAN_ELIMINATE (ep->from, ep->to)
622 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
625 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
626 = ! frame_pointer_needed;
629 /* Count the number of eliminable registers and build the FROM and TO
630 REG rtx's. Note that code in gen_rtx will cause, e.g.,
631 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
632 We depend on this. */
633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
635 num_eliminable += ep->can_eliminate;
636 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
637 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
640 num_labels = max_label_num () - get_first_label_num ();
642 /* Allocate the tables used to store offset information at labels. */
643 offsets_known_at = (char *) alloca (num_labels);
644 offsets_at
645 = (int (*)[NUM_ELIMINABLE_REGS])
646 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
648 offsets_known_at -= get_first_label_num ();
649 offsets_at -= get_first_label_num ();
651 /* Alter each pseudo-reg rtx to contain its hard reg number.
652 Assign stack slots to the pseudos that lack hard regs or equivalents.
653 Do not touch virtual registers. */
655 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
658 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
659 because the stack size may be a part of the offset computation for
660 register elimination. */
661 assign_stack_local (BLKmode, 0, 0);
663 /* If we have some registers we think can be eliminated, scan all insns to
664 see if there is an insn that sets one of these registers to something
665 other than itself plus a constant. If so, the register cannot be
666 eliminated. Doing this scan here eliminates an extra pass through the
667 main reload loop in the most common case where register elimination
669 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
670 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
671 || GET_CODE (insn) == CALL_INSN)
672 note_stores (PATTERN (insn), mark_not_eliminable);
674 #ifndef REGISTER_CONSTRAINTS
675 /* If all the pseudo regs have hard regs,
676 except for those that are never referenced,
677 we know that no reloads are needed. */
678 /* But that is not true if there are register constraints, since
679 in that case some pseudos might be in the wrong kind of hard reg. */
681 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
682 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
685 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
689 /* Compute the order of preference for hard registers to spill.
690 Store them by decreasing preference in potential_reload_regs. */
692 order_regs_for_reload ();
694 /* So far, no hard regs have been spilled. */
696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
697 spill_reg_order[i] = -1;
699 /* Initialize to -1, which means take the first spill register. */
700 last_spill_reg = -1;
702 /* On most machines, we can't use any register explicitly used in the
703 rtl as a spill register. But on some, we have to. Those will have
704 taken care to keep the life of hard regs as short as possible. */
706 #ifndef SMALL_REGISTER_CLASSES
707 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
710 /* Spill any hard regs that we know we can't eliminate. */
711 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
712 if (! ep->can_eliminate)
713 spill_hard_reg (ep->from, global, dumpfile, 1);
715 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
716 if (frame_pointer_needed)
717 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
721 for (i = 0; i < N_REG_CLASSES; i++)
723 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
724 bzero (basic_block_needs[i], n_basic_blocks);
727 /* From now on, we need to emit any moves without making new pseudos. */
728 reload_in_progress = 1;
730 /* This loop scans the entire function each go-round
731 and repeats until one repetition spills no additional hard regs. */
733 /* This flag is set when a pseudo reg is spilled,
734 to require another pass. Note that getting an additional reload
735 reg does not necessarily imply any pseudo reg was spilled;
736 sometimes we find a reload reg that no pseudo reg was allocated in. */
737 something_changed = 1;
738 /* This flag is set if there are any insns that require reloading. */
739 something_needs_reloads = 0;
740 /* This flag is set if there are any insns that require register eliminations. */
742 something_needs_elimination = 0;
743 while (something_changed)
747 /* For each class, number of reload regs needed in that class.
748 This is the maximum over all insns of the needs in that class
749 of the individual insn. */
750 int max_needs[N_REG_CLASSES];
751 /* For each class, size of group of consecutive regs
752 that is needed for the reloads of this class. */
753 int group_size[N_REG_CLASSES];
754 /* For each class, max number of consecutive groups needed.
755 (Each group contains group_size[CLASS] consecutive registers.) */
756 int max_groups[N_REG_CLASSES];
757 /* For each class, max number needed of regs that don't belong
758 to any of the groups. */
759 int max_nongroups[N_REG_CLASSES];
760 /* For each class, the machine mode which requires consecutive
761 groups of regs of that class.
762 If two different modes ever require groups of one class,
763 they must be the same size and equally restrictive for that class,
764 otherwise we can't handle the complexity. */
765 enum machine_mode group_mode[N_REG_CLASSES];
766 /* Record the insn where each maximum need is first found. */
767 rtx max_needs_insn[N_REG_CLASSES];
768 rtx max_groups_insn[N_REG_CLASSES];
769 rtx max_nongroups_insn[N_REG_CLASSES];
771 int starting_frame_size = get_frame_size ();
772 int previous_frame_pointer_needed = frame_pointer_needed;
773 static char *reg_class_names[] = REG_CLASS_NAMES;
775 something_changed = 0;
776 bzero ((char *) max_needs, sizeof max_needs);
777 bzero ((char *) max_groups, sizeof max_groups);
778 bzero ((char *) max_nongroups, sizeof max_nongroups);
779 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
780 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
781 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
782 bzero ((char *) group_size, sizeof group_size);
783 for (i = 0; i < N_REG_CLASSES; i++)
784 group_mode[i] = VOIDmode;
786 /* Keep track of which basic blocks are needing the reloads. */
789 /* Remember whether any element of basic_block_needs
790 changes from 0 to 1 in this pass. */
791 new_basic_block_needs = 0;
793 /* Reset all offsets on eliminable registers to their initial values. */
794 #ifdef ELIMINABLE_REGS
795 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
797 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
798 ep->previous_offset = ep->offset
799 = ep->max_offset = ep->initial_offset;
802 #ifdef INITIAL_FRAME_POINTER_OFFSET
803 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate
[0].initial_offset
);
805 if (!FRAME_POINTER_REQUIRED
)
807 reg_eliminate
[0].initial_offset
= 0;
809 reg_eliminate
[0].previous_offset
= reg_eliminate
[0].max_offset
810 = reg_eliminate
[0].offset
= reg_eliminate
[0].initial_offset
;
813 num_not_at_initial_offset
= 0;
815 bzero ((char *) &offsets_known_at
[get_first_label_num ()], num_labels
);
817 /* Set a known offset for each forced label to be at the initial offset
818 of each elimination. We do this because we assume that all
819 computed jumps occur from a location where each elimination is
820 at its initial offset. */
822 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
824 set_label_offsets (XEXP (x
, 0), NULL_RTX
, 1);
826 /* For each pseudo register that has an equivalent location defined,
827 try to eliminate any eliminable registers (such as the frame pointer)
828 assuming initial offsets for the replacement register, which
831 If the resulting location is directly addressable, substitute
832 the MEM we just got directly for the old REG.
834 If it is not addressable but is a constant or the sum of a hard reg
835 and constant, it is probably not addressable because the constant is
836 out of range, in that case record the address; we will generate
837 hairy code to compute the address in a register each time it is
838 needed. Similarly if it is a hard register, but one that is not
839 valid as an address register.
841 If the location is not addressable, but does not have one of the
842 above forms, assign a stack slot. We have to do this to avoid the
843 potential of producing lots of reloads if, e.g., a location involves
844 a pseudo that didn't get a hard register and has an equivalent memory
845 location that also involves a pseudo that didn't get a hard register.
847 Perhaps at some point we will improve reload_when_needed handling
848 so this problem goes away. But that's very hairy. */
850 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
851 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc
[i
])
853 rtx x
= eliminate_regs (reg_equiv_memory_loc
[i
], 0, NULL_RTX
);
855 if (strict_memory_address_p (GET_MODE (regno_reg_rtx
[i
]),
857 reg_equiv_mem
[i
] = x
, reg_equiv_address
[i
] = 0;
858 else if (CONSTANT_P (XEXP (x
, 0))
859 || (GET_CODE (XEXP (x
, 0)) == REG
860 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
861 || (GET_CODE (XEXP (x
, 0)) == PLUS
862 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
863 && (REGNO (XEXP (XEXP (x
, 0), 0))
864 < FIRST_PSEUDO_REGISTER
)
865 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
866 reg_equiv_address
[i
] = XEXP (x
, 0), reg_equiv_mem
[i
] = 0;
869 /* Make a new stack slot. Then indicate that something
870 changed so we go back and recompute offsets for
871 eliminable registers because the allocation of memory
872 below might change some offset. reg_equiv_{mem,address}
873 will be set up for this pseudo on the next pass around
875 reg_equiv_memory_loc
[i
] = 0;
876 reg_equiv_init
[i
] = 0;
878 something_changed
= 1;
882 /* If we allocated another pseudo to the stack, redo elimination
884 if (something_changed
)
887 /* If caller-saves needs a group, initialize the group to include
888 the size and mode required for caller-saves. */
890 if (caller_save_group_size
> 1)
892 group_mode
[(int) caller_save_spill_class
] = Pmode
;
893 group_size
[(int) caller_save_spill_class
] = caller_save_group_size
;
896 /* Compute the most additional registers needed by any instruction.
897 Collect information separately for each class of regs. */
899 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
901 if (global
&& this_block
+ 1 < n_basic_blocks
902 && insn
== basic_block_head
[this_block
+1])
905 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
906 might include REG_LABEL), we need to see what effects this
907 has on the known offsets at labels. */
909 if (GET_CODE (insn
) == CODE_LABEL
|| GET_CODE (insn
) == JUMP_INSN
910 || (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
911 && REG_NOTES (insn
) != 0))
912 set_label_offsets (insn
, insn
, 0);
914 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
916 /* Nonzero means don't use a reload reg that overlaps
917 the place where a function value can be returned. */
918 rtx avoid_return_reg
= 0;
920 rtx old_body
= PATTERN (insn
);
921 int old_code
= INSN_CODE (insn
);
922 rtx old_notes
= REG_NOTES (insn
);
923 int did_elimination
= 0;
925 /* To compute the number of reload registers of each class
926 needed for an insn, we must simulate what choose_reload_regs
927 can do. We do this by splitting an insn into an "input" and
928 an "output" part. RELOAD_OTHER reloads are used in both.
929 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
930 which must be live over the entire input section of reloads,
931 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
932 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
935 The registers needed for output are RELOAD_OTHER and
936 RELOAD_FOR_OUTPUT, which are live for the entire output
937 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
938 reloads for each operand.
940 The total number of registers needed is the maximum of the
941 inputs and outputs. */
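/* Added worked example: suppose that, for one class, an insn has
   1 RELOAD_OTHER reload, 2 RELOAD_FOR_INPUT reloads, 1
   RELOAD_FOR_OPERAND_ADDRESS reload and 1 RELOAD_FOR_OUTPUT reload.
   The input section then needs 2 + 1 = 3 registers, the output
   section needs 1, and the RELOAD_OTHER reload is live across both,
   so the insn's total need for that class is 1 + MAX (3, 1) = 4.  */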
945 /* [0] is normal, [1] is nongroup. */
946 int regs
[2][N_REG_CLASSES
];
947 int groups
[N_REG_CLASSES
];
950 /* Each `struct needs' corresponds to one RELOAD_... type. */
956 struct needs other_addr
;
957 struct needs op_addr
;
958 struct needs op_addr_reload
;
959 struct needs in_addr
[MAX_RECOG_OPERANDS
];
960 struct needs out_addr
[MAX_RECOG_OPERANDS
];
963 /* If needed, eliminate any eliminable registers. */
965 did_elimination
= eliminate_regs_in_insn (insn
, 0);
967 #ifdef SMALL_REGISTER_CLASSES
968 /* Set avoid_return_reg if this is an insn
969 that might use the value of a function call. */
970 if (GET_CODE (insn
) == CALL_INSN
)
972 if (GET_CODE (PATTERN (insn
)) == SET
)
973 after_call
= SET_DEST (PATTERN (insn
));
974 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
975 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
976 after_call
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
980 else if (after_call
!= 0
981 && !(GET_CODE (PATTERN (insn
)) == SET
982 && SET_DEST (PATTERN (insn
)) == stack_pointer_rtx
))
984 if (reg_referenced_p (after_call
, PATTERN (insn
)))
985 avoid_return_reg
= after_call
;
988 #endif /* SMALL_REGISTER_CLASSES */
990 /* Analyze the instruction. */
991 find_reloads (insn
, 0, spill_indirect_levels
, global
,
994 /* Remember for later shortcuts which insns had any reloads or
995 register eliminations.
997 One might think that it would be worthwhile to mark insns
998 that need register replacements but not reloads, but this is
999 not safe because find_reloads may do some manipulation of
1000 the insn (such as swapping commutative operands), which would
1001 be lost when we restore the old pattern after register
1002 replacement. So the actions of find_reloads must be redone in
1003 subsequent passes or in reload_as_needed.
1005 However, it is safe to mark insns that need reloads
1006 but not register replacement. */
1008 PUT_MODE (insn
, (did_elimination
? QImode
1009 : n_reloads
? HImode
1010 : GET_MODE (insn
) == DImode
? DImode
1013 /* Discard any register replacements done. */
1014 if (did_elimination
)
1016 obstack_free (&reload_obstack
, reload_firstobj
);
1017 PATTERN (insn
) = old_body
;
1018 INSN_CODE (insn
) = old_code
;
1019 REG_NOTES (insn
) = old_notes
;
1020 something_needs_elimination
= 1;
1023 /* If this insn has no reloads, we need not do anything except
1024 in the case of a CALL_INSN when we have caller-saves and
1025 caller-save needs reloads. */
1028 && ! (GET_CODE (insn
) == CALL_INSN
1029 && caller_save_spill_class
!= NO_REGS
))
1032 something_needs_reloads
= 1;
1033 bzero ((char *) &insn_needs
, sizeof insn_needs
);
1035 /* Count each reload once in every class
1036 containing the reload's own class. */
1038 for (i
= 0; i
< n_reloads
; i
++)
1040 register enum reg_class
*p
;
1041 enum reg_class
class = reload_reg_class
[i
];
1043 enum machine_mode mode
;
1045 struct needs
*this_needs
;
1047 /* Don't count the dummy reloads, for which one of the
1048 regs mentioned in the insn can be used for reloading.
1049 Don't count optional reloads.
1050 Don't count reloads that got combined with others. */
1051 if (reload_reg_rtx
[i
] != 0
1052 || reload_optional
[i
] != 0
1053 || (reload_out
[i
] == 0 && reload_in
[i
] == 0
1054 && ! reload_secondary_p
[i
]))
1057 /* Show that a reload register of this class is needed
1058 in this basic block. We do not use insn_needs and
1059 insn_groups because they are overly conservative for
1061 if (global
&& ! basic_block_needs
[(int) class][this_block
])
1063 basic_block_needs
[(int) class][this_block
] = 1;
1064 new_basic_block_needs
= 1;
1068 mode
= reload_inmode
[i
];
1069 if (GET_MODE_SIZE (reload_outmode
[i
]) > GET_MODE_SIZE (mode
))
1070 mode
= reload_outmode
[i
];
1071 size
= CLASS_MAX_NREGS (class, mode
);
1073 /* If this class doesn't want a group, determine if we have
1074 a nongroup need or a regular need. We have a nongroup
1075 need if this reload conflicts with a group reload whose
1076 class intersects with this reload's class. */
1080 for (j
= 0; j
< n_reloads
; j
++)
1081 if ((CLASS_MAX_NREGS (reload_reg_class
[j
],
1082 (GET_MODE_SIZE (reload_outmode
[j
])
1083 > GET_MODE_SIZE (reload_inmode
[j
]))
1087 && (!reload_optional
[j
])
1088 && (reload_in
[j
] != 0 || reload_out
[j
] != 0
1089 || reload_secondary_p
[j
])
1090 && reloads_conflict (i
, j
)
1091 && reg_classes_intersect_p (class,
1092 reload_reg_class
[j
]))
1098 /* Decide which time-of-use to count this reload for. */
1099 switch (reload_when_needed
[i
])
1102 this_needs
= &insn_needs
.other
;
1104 case RELOAD_FOR_INPUT
:
1105 this_needs
= &insn_needs
.input
;
1107 case RELOAD_FOR_OUTPUT
:
1108 this_needs
= &insn_needs
.output
;
1110 case RELOAD_FOR_INSN
:
1111 this_needs
= &insn_needs
.insn
;
1113 case RELOAD_FOR_OTHER_ADDRESS
:
1114 this_needs
= &insn_needs
.other_addr
;
1116 case RELOAD_FOR_INPUT_ADDRESS
:
1117 this_needs
= &insn_needs
.in_addr
[reload_opnum
[i
]];
1119 case RELOAD_FOR_OUTPUT_ADDRESS
:
1120 this_needs
= &insn_needs
.out_addr
[reload_opnum
[i
]];
1122 case RELOAD_FOR_OPERAND_ADDRESS
:
1123 this_needs
= &insn_needs
.op_addr
;
1125 case RELOAD_FOR_OPADDR_ADDR
:
1126 this_needs
= &insn_needs
.op_addr_reload
;
1132 enum machine_mode other_mode
, allocate_mode
;
1134 /* Count number of groups needed separately from
1135 number of individual regs needed. */
1136 this_needs
->groups
[(int) class]++;
1137 p
= reg_class_superclasses
[(int) class];
1138 while (*p
!= LIM_REG_CLASSES
)
1139 this_needs
->groups
[(int) *p
++]++;
1141 /* Record size and mode of a group of this class. */
1142 /* If more than one size group is needed,
1143 make all groups the largest needed size. */
1144 if (group_size
[(int) class] < size
)
1146 other_mode
= group_mode
[(int) class];
1147 allocate_mode
= mode
;
1149 group_size
[(int) class] = size
;
1150 group_mode
[(int) class] = mode
;
1155 allocate_mode
= group_mode
[(int) class];
1158 /* Crash if two dissimilar machine modes both need
1159 groups of consecutive regs of the same class. */
1161 if (other_mode
!= VOIDmode
&& other_mode
!= allocate_mode
1162 && ! modes_equiv_for_class_p (allocate_mode
,
1164 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1169 this_needs
->regs
[nongroup_need
][(int) class] += 1;
1170 p
= reg_class_superclasses
[(int) class];
1171 while (*p
!= LIM_REG_CLASSES
)
1172 this_needs
->regs
[nongroup_need
][(int) *p
++] += 1;
1178 /* All reloads have been counted for this insn;
1179 now merge the various times of use.
1180 This sets insn_needs, etc., to the maximum total number
1181 of registers needed at any point in this insn. */
1183 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1185 int in_max
, out_max
;
1187 /* Compute normal and nongroup needs. */
1188 for (j
= 0; j
<= 1; j
++)
1190 for (in_max
= 0, out_max
= 0, k
= 0;
1191 k
< reload_n_operands
; k
++)
1194 = MAX (in_max
, insn_needs
.in_addr
[k
].regs
[j
][i
]);
1196 = MAX (out_max
, insn_needs
.out_addr
[k
].regs
[j
][i
]);
1199 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1200 and operand addresses but not things used to reload
1201 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1202 don't conflict with things needed to reload inputs or
1205 in_max
= MAX (MAX (insn_needs
.op_addr
.regs
[j
][i
],
1206 insn_needs
.op_addr_reload
.regs
[j
][i
]),
1209 out_max
= MAX (out_max
, insn_needs
.insn
.regs
[j
][i
]);
1211 insn_needs
.input
.regs
[j
][i
]
1212 = MAX (insn_needs
.input
.regs
[j
][i
]
1213 + insn_needs
.op_addr
.regs
[j
][i
]
1214 + insn_needs
.insn
.regs
[j
][i
],
1215 in_max
+ insn_needs
.input
.regs
[j
][i
]);
1217 insn_needs
.output
.regs
[j
][i
] += out_max
;
1218 insn_needs
.other
.regs
[j
][i
]
1219 += MAX (MAX (insn_needs
.input
.regs
[j
][i
],
1220 insn_needs
.output
.regs
[j
][i
]),
1221 insn_needs
.other_addr
.regs
[j
][i
]);
1225 /* Now compute group needs. */
1226 for (in_max
= 0, out_max
= 0, j
= 0;
1227 j
< reload_n_operands
; j
++)
1229 in_max
= MAX (in_max
, insn_needs
.in_addr
[j
].groups
[i
]);
1231 = MAX (out_max
, insn_needs
.out_addr
[j
].groups
[i
]);
1234 in_max
= MAX (MAX (insn_needs
.op_addr
.groups
[i
],
1235 insn_needs
.op_addr_reload
.groups
[i
]),
1237 out_max
= MAX (out_max
, insn_needs
.insn
.groups
[i
]);
1239 insn_needs
.input
.groups
[i
]
1240 = MAX (insn_needs
.input
.groups
[i
]
1241 + insn_needs
.op_addr
.groups
[i
]
1242 + insn_needs
.insn
.groups
[i
],
1243 in_max
+ insn_needs
.input
.groups
[i
]);
1245 insn_needs
.output
.groups
[i
] += out_max
;
1246 insn_needs
.other
.groups
[i
]
1247 += MAX (MAX (insn_needs
.input
.groups
[i
],
1248 insn_needs
.output
.groups
[i
]),
1249 insn_needs
.other_addr
.groups
[i
]);
1252 /* If this is a CALL_INSN and caller-saves will need
1253 a spill register, act as if the spill register is
1254 needed for this insn. However, the spill register
1255 can be used by any reload of this insn, so we only
1256 need do something if no need for that class has
1259 The assumption that every CALL_INSN will trigger a
1260 caller-save is highly conservative, however, the number
1261 of cases where caller-saves will need a spill register but
1262 a block containing a CALL_INSN won't need a spill register
1263 of that class should be quite rare.
1265 If a group is needed, the size and mode of the group will
1266 have been set up at the beginning of this loop. */
1268 if (GET_CODE (insn
) == CALL_INSN
1269 && caller_save_spill_class
!= NO_REGS
)
1271 /* See if this register would conflict with any reload
1272 that needs a group. */
1273 int nongroup_need
= 0;
1274 int *caller_save_needs
;
1276 for (j
= 0; j
< n_reloads
; j
++)
1277 if ((CLASS_MAX_NREGS (reload_reg_class
[j
],
1278 (GET_MODE_SIZE (reload_outmode
[j
])
1279 > GET_MODE_SIZE (reload_inmode
[j
]))
1283 && reg_classes_intersect_p (caller_save_spill_class
,
1284 reload_reg_class
[j
]))
1291 = (caller_save_group_size
> 1
1292 ? insn_needs
.other
.groups
1293 : insn_needs
.other
.regs
[nongroup_need
]);
1295 if (caller_save_needs
[(int) caller_save_spill_class
] == 0)
1297 register enum reg_class
*p
1298 = reg_class_superclasses
[(int) caller_save_spill_class
];
1300 caller_save_needs
[(int) caller_save_spill_class
]++;
1302 while (*p
!= LIM_REG_CLASSES
)
1303 caller_save_needs
[(int) *p
++] += 1;
1306 /* Show that this basic block will need a register of
1310 && ! (basic_block_needs
[(int) caller_save_spill_class
]
1313 basic_block_needs
[(int) caller_save_spill_class
]
1315 new_basic_block_needs
= 1;
1319 #ifdef SMALL_REGISTER_CLASSES
1320 /* If this insn stores the value of a function call,
1321 and that value is in a register that has been spilled,
1322 and if the insn needs a reload in a class
1323 that might use that register as the reload register,
1324 then add an extra need in that class.
1325 This makes sure we have a register available that does
1326 not overlap the return value. */
1328 if (avoid_return_reg
)
1330 int regno
= REGNO (avoid_return_reg
);
1332 = HARD_REGNO_NREGS (regno
, GET_MODE (avoid_return_reg
));
1334 int basic_needs
[N_REG_CLASSES
], basic_groups
[N_REG_CLASSES
];
1336 /* First compute the "basic needs", which counts a
1337 need only in the smallest class in which it
1340 bcopy ((char *) insn_needs
.other
.regs
[0],
1341 (char *) basic_needs
, sizeof basic_needs
);
1342 bcopy ((char *) insn_needs
.other
.groups
,
1343 (char *) basic_groups
, sizeof basic_groups
);
1345 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1349 if (basic_needs
[i
] >= 0)
1350 for (p
= reg_class_superclasses
[i
];
1351 *p
!= LIM_REG_CLASSES
; p
++)
1352 basic_needs
[(int) *p
] -= basic_needs
[i
];
1354 if (basic_groups
[i
] >= 0)
1355 for (p
= reg_class_superclasses
[i
];
1356 *p
!= LIM_REG_CLASSES
; p
++)
1357 basic_groups
[(int) *p
] -= basic_groups
[i
];
1360 /* Now count extra regs if there might be a conflict with
1361 the return value register. */
1363 for (r
= regno
; r
< regno
+ nregs
; r
++)
1364 if (spill_reg_order
[r
] >= 0)
1365 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1366 if (TEST_HARD_REG_BIT (reg_class_contents
[i
], r
))
1368 if (basic_needs
[i
] > 0)
1372 insn_needs
.other
.regs
[0][i
]++;
1373 p
= reg_class_superclasses
[i
];
1374 while (*p
!= LIM_REG_CLASSES
)
1375 insn_needs
.other
.regs
[0][(int) *p
++]++;
1377 if (basic_groups
[i
] > 0)
1381 insn_needs
.other
.groups
[i
]++;
1382 p
= reg_class_superclasses
[i
];
1383 while (*p
!= LIM_REG_CLASSES
)
1384 insn_needs
.other
.groups
[(int) *p
++]++;
1388 #endif /* SMALL_REGISTER_CLASSES */
1390 /* For each class, collect maximum need of any insn. */
1392 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1394 if (max_needs
[i
] < insn_needs
.other
.regs
[0][i
])
1396 max_needs
[i
] = insn_needs
.other
.regs
[0][i
];
1397 max_needs_insn
[i
] = insn
;
1399 if (max_groups
[i
] < insn_needs
.other
.groups
[i
])
1401 max_groups
[i
] = insn_needs
.other
.groups
[i
];
1402 max_groups_insn
[i
] = insn
;
1404 if (max_nongroups
[i
] < insn_needs
.other
.regs
[1][i
])
1406 max_nongroups
[i
] = insn_needs
.other
.regs
[1][i
];
1407 max_nongroups_insn
[i
] = insn
;
1411 /* Note that there is a continue statement above. */
1414 /* If we allocated any new memory locations, make another pass
1415 since it might have changed elimination offsets. */
1416 if (starting_frame_size
!= get_frame_size ())
1417 something_changed
= 1;
1420 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1422 if (max_needs
[i
] > 0)
1424 ";; Need %d reg%s of class %s (for insn %d).\n",
1425 max_needs
[i
], max_needs
[i
] == 1 ? "" : "s",
1426 reg_class_names
[i
], INSN_UID (max_needs_insn
[i
]));
1427 if (max_nongroups
[i
] > 0)
1429 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1430 max_nongroups
[i
], max_nongroups
[i
] == 1 ? "" : "s",
1431 reg_class_names
[i
], INSN_UID (max_nongroups_insn
[i
]));
1432 if (max_groups
[i
] > 0)
1434 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1435 max_groups
[i
], max_groups
[i
] == 1 ? "" : "s",
1436 mode_name
[(int) group_mode
[i
]],
1437 reg_class_names
[i
], INSN_UID (max_groups_insn
[i
]));
1440 /* If we have caller-saves, set up the save areas and see if caller-save
1441 will need a spill register. */
1443 if (caller_save_needed
1444 && ! setup_save_areas (&something_changed
)
1445 && caller_save_spill_class
== NO_REGS
)
1447 /* The class we will need depends on whether the machine
1448 supports the sum of two registers for an address; see
1449 find_address_reloads for details. */
1451 caller_save_spill_class
1452 = double_reg_address_ok
? INDEX_REG_CLASS
: BASE_REG_CLASS
;
1453 caller_save_group_size
1454 = CLASS_MAX_NREGS (caller_save_spill_class
, Pmode
);
1455 something_changed
= 1;
1458 /* See if anything that happened changes which eliminations are valid.
1459 For example, on the Sparc, whether or not the frame pointer can
1460 be eliminated can depend on what registers have been used. We need
1461 not check some conditions again (such as flag_omit_frame_pointer)
1462 since they can't have changed. */
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1466 #ifdef ELIMINABLE_REGS
1467 || ! CAN_ELIMINATE (ep->from, ep->to)
1470 ep->can_eliminate = 0;
1472 /* Look for the case where we have discovered that we can't replace
1473 register A with register B and that means that we will now be
1474 trying to replace register A with register C. This means we can
1475 no longer replace register C with register B and we need to disable
1476 such an elimination, if it exists. This occurs often with A == ap,
1477 B == sp, and C == fp. */
1479 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1481 struct elim_table *op;
1482 register int new_to = -1;
1484 if (! ep->can_eliminate && ep->can_eliminate_previous)
1486 /* Find the current elimination for ep->from, if there is a different one. */
1488 for (op = reg_eliminate;
1489 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1490 if (op->from == ep->from && op->can_eliminate)
1496 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1498 for (op = reg_eliminate;
1499 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1500 if (op->from == new_to && op->to == ep->to)
1501 op->can_eliminate = 0;
1505 /* See if any registers that we thought we could eliminate the previous
1506 time are no longer eliminable. If so, something has changed and we
1507 must spill the register. Also, recompute the number of eliminable
1508 registers and see if the frame pointer is needed; it is if there is
1509 no elimination of the frame pointer that we can perform. */
1511 frame_pointer_needed = 1;
1512 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1514 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1515 && ep->to != HARD_FRAME_POINTER_REGNUM)
1516 frame_pointer_needed = 0;
1518 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
1520 ep
->can_eliminate_previous
= 0;
1521 spill_hard_reg (ep
->from
, global
, dumpfile
, 1);
1522 something_changed
= 1;
1527 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1528 /* If we didn't need a frame pointer last time, but we do now, spill
1529 the hard frame pointer. */
1530 if (frame_pointer_needed
&& ! previous_frame_pointer_needed
)
1532 spill_hard_reg (HARD_FRAME_POINTER_REGNUM
, global
, dumpfile
, 1);
1533 something_changed
= 1;
1537 /* If all needs are met, we win. */
1539 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1540 if (max_needs
[i
] > 0 || max_groups
[i
] > 0 || max_nongroups
[i
] > 0)
1542 if (i
== N_REG_CLASSES
&& !new_basic_block_needs
&& ! something_changed
)
1545 /* Not all needs are met; must spill some hard regs. */
1547 /* Put all registers spilled so far back in potential_reload_regs, but
1548 put them at the front, since we've already spilled most of the
1549 pseudos in them (we might have left some pseudos unspilled if they
1550 were in a block that didn't need any spill registers of a conflicting
1551 class. We used to try to mark off the need for those registers,
1552 but doing so properly is very complex and reallocating them is the
1553 simpler approach. First, "pack" potential_reload_regs by pushing
1554 any nonnegative entries towards the end. That will leave room
1555 for the registers we already spilled.
1557 Also, undo the marking of the spill registers from the last time
1558 around in FORBIDDEN_REGS since we will probably be allocating
1561 ??? It is theoretically possible that we might end up not using one
1562 of our previously-spilled registers in this allocation, even though
1563 they are at the head of the list. It's not clear what to do about
1564 this, but it was no better before, when we marked off the needs met
1565 by the previously-spilled registers. With the current code, globals
1566 can be allocated into these registers, but locals cannot. */
1570 for (i
= j
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1571 if (potential_reload_regs
[i
] != -1)
1572 potential_reload_regs
[j
--] = potential_reload_regs
[i
];
1574 for (i
= 0; i
< n_spills
; i
++)
1576 potential_reload_regs
[i
] = spill_regs
[i
];
1577 spill_reg_order
[spill_regs
[i
]] = -1;
1578 CLEAR_HARD_REG_BIT (forbidden_regs
, spill_regs
[i
]);
1584 /* Now find more reload regs to satisfy the remaining need
1585 Do it by ascending class number, since otherwise a reg
1586 might be spilled for a big class and might fail to count
1587 for a smaller class even though it belongs to that class.
1589 Count spilled regs in `spills', and add entries to
1590 `spill_regs' and `spill_reg_order'.
1592 ??? Note there is a problem here.
1593 When there is a need for a group in a high-numbered class,
1594 and also need for non-group regs that come from a lower class,
1595 the non-group regs are chosen first. If there aren't many regs,
1596 they might leave no room for a group.
1598 This was happening on the 386. To fix it, we added the code
1599 that calls possible_group_p, so that the lower class won't
1600 break up the last possible group.
1602 Really fixing the problem would require changes above
1603 in counting the regs already spilled, and in choose_reload_regs.
1604 It might be hard to avoid introducing bugs there. */
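/* Added illustration of the ordering problem described above: suppose a
   superclass needs one group of 2 consecutive registers and a smaller class
   needs one single (non-group) register, and only r0, r1 and r2 are still
   available.  If the single register is satisfied first and happens to take
   r1, the remaining r0 and r2 are not consecutive and the group can no
   longer be formed; possible_group_p is what keeps the lower class from
   breaking up the last such pair.  */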
1606 CLEAR_HARD_REG_SET (counted_for_groups);
1607 CLEAR_HARD_REG_SET (counted_for_nongroups);
1609 for (class = 0; class < N_REG_CLASSES; class++)
1611 /* First get the groups of registers.
1612 If we got single registers first, we might fragment
1614 while (max_groups
[class] > 0)
1616 /* If any single spilled regs happen to form groups,
1617 count them now. Maybe we don't really need
1618 to spill another group. */
1619 count_possible_groups (group_size
, group_mode
, max_groups
,
1622 if (max_groups
[class] <= 0)
1625 /* Groups of size 2 (the only groups used on most machines)
1626 are treated specially. */
1627 if (group_size
[class] == 2)
1629 /* First, look for a register that will complete a group. */
1630 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1634 j
= potential_reload_regs
[i
];
1635 if (j
>= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
)
1637 ((j
> 0 && (other
= j
- 1, spill_reg_order
[other
] >= 0)
1638 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1639 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1640 && HARD_REGNO_MODE_OK (other
, group_mode
[class])
1641 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1643 /* We don't want one part of another group.
1644 We could get "two groups" that overlap! */
1645 && ! TEST_HARD_REG_BIT (counted_for_groups
, other
))
1647 (j
< FIRST_PSEUDO_REGISTER
- 1
1648 && (other
= j
+ 1, spill_reg_order
[other
] >= 0)
1649 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1650 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1651 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1652 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1654 && ! TEST_HARD_REG_BIT (counted_for_groups
,
1657 register enum reg_class
*p
;
1659 /* We have found one that will complete a group,
1660 so count off one group as provided. */
1661 max_groups
[class]--;
1662 p
= reg_class_superclasses
[class];
1663 while (*p
!= LIM_REG_CLASSES
)
1665 if (group_size
[(int) *p
] <= group_size
[class])
1666 max_groups
[(int) *p
]--;
1670 /* Indicate both these regs are part of a group. */
1671 SET_HARD_REG_BIT (counted_for_groups
, j
);
1672 SET_HARD_REG_BIT (counted_for_groups
, other
);
1676 /* We can't complete a group, so start one. */
1677 #ifdef SMALL_REGISTER_CLASSES
1678 /* Look for a pair neither of which is explicitly used. */
1679 if (i
== FIRST_PSEUDO_REGISTER
)
1680 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1683 j
= potential_reload_regs
[i
];
1684 /* Verify that J+1 is a potential reload reg. */
1685 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1686 if (potential_reload_regs
[k
] == j
+ 1)
1688 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1689 && k
< FIRST_PSEUDO_REGISTER
1690 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1691 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1692 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1693 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1694 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1696 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1)
1697 /* Reject J at this stage
1698 if J+1 was explicitly used. */
1699 && ! regs_explicitly_used
[j
+ 1])
1703 /* Now try any group at all
1704 whose registers are not in bad_spill_regs. */
1705 if (i
== FIRST_PSEUDO_REGISTER
)
1706 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1709 j
= potential_reload_regs
[i
];
1710 /* Verify that J+1 is a potential reload reg. */
1711 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1712 if (potential_reload_regs
[k
] == j
+ 1)
1714 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1715 && k
< FIRST_PSEUDO_REGISTER
1716 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1717 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1718 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1719 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1720 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
1722 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1))
1726 /* I should be the index in potential_reload_regs
1727 of the new reload reg we have found. */
1729 if (i
>= FIRST_PSEUDO_REGISTER
)
1731 /* There are no groups left to spill. */
1732 spill_failure (max_groups_insn
[class]);
1738 |= new_spill_reg (i
, class, max_needs
, NULL_PTR
,
1743 /* For groups of more than 2 registers,
1744 look for a sufficient sequence of unspilled registers,
1745 and spill them all at once. */
1746 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1750 j
= potential_reload_regs
[i
];
1752 && j
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
1753 && HARD_REGNO_MODE_OK (j
, group_mode
[class]))
1755 /* Check each reg in the sequence. */
1756 for (k
= 0; k
< group_size
[class]; k
++)
1757 if (! (spill_reg_order
[j
+ k
] < 0
1758 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ k
)
1759 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ k
)))
1761 /* We got a full sequence, so spill them all. */
1762 if (k
== group_size
[class])
1764 register enum reg_class
*p
;
1765 for (k
= 0; k
< group_size
[class]; k
++)
1768 SET_HARD_REG_BIT (counted_for_groups
, j
+ k
);
1769 for (idx
= 0; idx
< FIRST_PSEUDO_REGISTER
; idx
++)
1770 if (potential_reload_regs
[idx
] == j
+ k
)
1773 |= new_spill_reg (idx
, class,
1774 max_needs
, NULL_PTR
,
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups
[class]--;
1781 p
= reg_class_superclasses
[class];
1782 while (*p
!= LIM_REG_CLASSES
)
1784 if (group_size
[(int) *p
]
1785 <= group_size
[class])
1786 max_groups
[(int) *p
]--;
1793 /* We couldn't find any registers for this reload.
1794 Avoid going into an infinite loop. */
1795 if (i
>= FIRST_PSEUDO_REGISTER
)
1797 /* There are no groups left. */
1798 spill_failure (max_groups_insn
[class]);
1805 /* Now similarly satisfy all need for single registers. */
1807       while (max_needs[class] > 0 || max_nongroups[class] > 0)
1809 #ifdef SMALL_REGISTER_CLASSES
1810 /* This should be right for all machines, but only the 386
1811 is known to need it, so this conditional plays safe.
1812 ??? For 2.5, try making this unconditional. */
1813 /* If we spilled enough regs, but they weren't counted
1814 against the non-group need, see if we can count them now.
1815 If so, we can avoid some actual spilling. */
1816 	  if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1817 	    for (i = 0; i < n_spills; i++)
1818 if (TEST_HARD_REG_BIT (reg_class_contents
[class],
1820 && !TEST_HARD_REG_BIT (counted_for_groups
,
1822 && !TEST_HARD_REG_BIT (counted_for_nongroups
,
1824 && max_nongroups
[class] > 0)
1826 register enum reg_class
*p
;
1828 SET_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]);
1829 max_nongroups
[class]--;
1830 p
= reg_class_superclasses
[class];
1831 while (*p
!= LIM_REG_CLASSES
)
1832 max_nongroups
[(int) *p
++]--;
1834 	  if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1838 /* Consider the potential reload regs that aren't
1839 yet in use as reload regs, in order of preference.
1840 Find the most preferred one that's in this class. */
1842 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1843 if (potential_reload_regs
[i
] >= 0
1844 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1845 potential_reload_regs
[i
])
1846 /* If this reg will not be available for groups,
1847 pick one that does not foreclose possible groups.
1848 This is a kludge, and not very general,
1849 but it should be sufficient to make the 386 work,
1850 and the problem should not occur on machines with
1852 && (max_nongroups
[class] == 0
1853 || possible_group_p (potential_reload_regs
[i
], max_groups
)))
1856 /* If we couldn't get a register, try to get one even if we
1857 might foreclose possible groups. This may cause problems
1858 later, but that's better than aborting now, since it is
1859 possible that we will, in fact, be able to form the needed
1860 group even with this allocation. */
1862 if (i
>= FIRST_PSEUDO_REGISTER
1863 && (asm_noperands (max_needs
[class] > 0
1864 ? max_needs_insn
[class]
1865 : max_nongroups_insn
[class])
1867 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1868 if (potential_reload_regs
[i
] >= 0
1869 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1870 potential_reload_regs
[i
]))
1873 /* I should be the index in potential_reload_regs
1874 of the new reload reg we have found. */
1876 if (i
>= FIRST_PSEUDO_REGISTER
)
1878 /* There are no possible registers left to spill. */
1879 spill_failure (max_needs
[class] > 0 ? max_needs_insn
[class]
1880 : max_nongroups_insn
[class]);
1886 |= new_spill_reg (i
, class, max_needs
, max_nongroups
,
1892   /* If global-alloc was run, notify it of any register eliminations we have done.  */
1895   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1896     if (ep->can_eliminate)
1897       mark_elimination (ep->from, ep->to);
1899 /* Insert code to save and restore call-clobbered hard regs
1900      around calls.  Tell it what mode to use so that we will process
1901 those insns in reload_as_needed if we have to. */
1903 if (caller_save_needed
)
1904 save_call_clobbered_regs (num_eliminable
? QImode
1905 : caller_save_spill_class
!= NO_REGS
? HImode
1908 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1909 If that insn didn't set the register (i.e., it copied the register to
1910 memory), just delete that insn instead of the equivalencing insn plus
1911 anything now dead. If we call delete_dead_insn on that insn, we may
1912      delete the insn that actually sets the register if the register dies
1913 there and that is incorrect. */
1915   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1916     if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1917 	&& GET_CODE (reg_equiv_init[i]) != NOTE)
1919 	if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1920 	  delete_dead_insn (reg_equiv_init[i]);
1923 	    PUT_CODE (reg_equiv_init[i], NOTE);
1924 	    NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1925 	    NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
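	    /* Turning the insn into a NOTE_INSN_DELETED note removes it
	       without having to unlink it from the insn chain.  */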
1929 /* Use the reload registers where necessary
1930 by generating move instructions to move the must-be-register
1931 values into or out of the reload registers. */
1933   if (something_needs_reloads || something_needs_elimination
1934       || (caller_save_needed && num_eliminable)
1935       || caller_save_spill_class != NO_REGS)
1936     reload_as_needed (first, global);
1938 /* If we were able to eliminate the frame pointer, show that it is no
1939      longer live at the start of any basic block.  If it is live by
1940 virtue of being in a pseudo, that pseudo will be marked live
1941      and hence the frame pointer will be known to be live via that pseudo.  */
1944   if (! frame_pointer_needed)
1945     for (i = 0; i < n_basic_blocks; i++)
1946       basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1947 	&= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1948 				      % REGSET_ELT_BITS));
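  /* Each block's live-at-start set is a bit vector of REGSET_ELT_TYPE words;
     the division above selects the word and the remainder selects the bit
     corresponding to the hard frame pointer.  */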
1950 /* Come here (with failure set nonzero) if we can't get enough spill regs
1951 and we decide not to abort about it. */
1954   reload_in_progress = 0;
1956 /* Now eliminate all pseudo regs by modifying them into
1957 their equivalent memory references.
1958 The REG-rtx's for the pseudos are modified in place,
1959 so all insns that used to refer to them now refer to memory.
1961 For a reg that has a reg_equiv_address, all those insns
1962 were changed by reloading so that no insns refer to it any longer;
1963 but the DECL_RTL of a variable decl may refer to it,
1964 and if so this causes the debugging info to mention the variable. */
1966   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1970       if (reg_equiv_mem[i])
	 {
1972 	  addr = XEXP (reg_equiv_mem[i], 0);
1973 	  in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
	 }
1975       if (reg_equiv_address[i])
1976 	addr = reg_equiv_address[i];
1979       if (reg_renumber[i] < 0)
	 {
1981 	  rtx reg = regno_reg_rtx[i];
1982 	  XEXP (reg, 0) = addr;
1983 	  REG_USERVAR_P (reg) = 0;
1984 	  MEM_IN_STRUCT_P (reg) = in_struct;
1985 	  PUT_CODE (reg, MEM);
	 }
1987       else if (reg_equiv_mem[i])
1988 	XEXP (reg_equiv_mem[i], 0) = addr;
1992 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1993 /* Make a pass over all the insns and remove death notes for things that
1994 are no longer registers or no longer die in the insn (e.g., an input
1995 and output pseudo being tied). */
1997   for (insn = first; insn; insn = NEXT_INSN (insn))
1998     if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2002 	for (note = REG_NOTES (insn); note; note = next)
	  {
2004 	    next = XEXP (note, 1);
2005 	    if (REG_NOTE_KIND (note) == REG_DEAD
2006 		&& (GET_CODE (XEXP (note, 0)) != REG
2007 		    || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2008 	      remove_note (insn, note);
	  }
2013 /* Indicate that we no longer have known memory locations or constants. */
2014   reg_equiv_constant = 0;
2015   reg_equiv_memory_loc = 0;
2018     free (scratch_list);
2021     free (scratch_block);
2027 /* Nonzero if, after spilling reg REGNO for non-groups,
2028 it will still be possible to find a group if we still need one. */
2031 possible_group_p (regno
, max_groups
)
2036 int class = (int) NO_REGS
;
2038 for (i
= 0; i
< (int) N_REG_CLASSES
; i
++)
2039 if (max_groups
[i
] > 0)
2045 if (class == (int) NO_REGS
)
2048 /* Consider each pair of consecutive registers. */
2049 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
- 1; i
++)
2051 /* Ignore pairs that include reg REGNO. */
2052 if (i
== regno
|| i
+ 1 == regno
)
2055 /* Ignore pairs that are outside the class that needs the group.
2056 ??? Here we fail to handle the case where two different classes
2057 independently need groups. But this never happens with our
2058 current machine descriptions. */
2059 if (! (TEST_HARD_REG_BIT (reg_class_contents
[class], i
)
2060 && TEST_HARD_REG_BIT (reg_class_contents
[class], i
+ 1)))
2063 /* A pair of consecutive regs we can still spill does the trick. */
2064 if (spill_reg_order
[i
] < 0 && spill_reg_order
[i
+ 1] < 0
2065 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2066 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1))
2069 /* A pair of one already spilled and one we can spill does it
2070 provided the one already spilled is not otherwise reserved. */
2071 if (spill_reg_order
[i
] < 0
2072 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2073 && spill_reg_order
[i
+ 1] >= 0
2074 && ! TEST_HARD_REG_BIT (counted_for_groups
, i
+ 1)
2075 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, i
+ 1))
2077 if (spill_reg_order
[i
+ 1] < 0
2078 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1)
2079 && spill_reg_order
[i
] >= 0
2080 && ! TEST_HARD_REG_BIT (counted_for_groups
, i
)
2081 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, i
))
2088 /* Count any groups of CLASS that can be formed from the registers recently
2092 count_possible_groups (group_size
, group_mode
, max_groups
, class)
2094 enum machine_mode
*group_mode
;
2101 /* Now find all consecutive groups of spilled registers
2102 and mark each group off against the need for such groups.
2103 But don't count them against ordinary need, yet. */
2105 if (group_size
[class] == 0)
2108 CLEAR_HARD_REG_SET (new);
2110 /* Make a mask of all the regs that are spill regs in class I. */
2111 for (i
= 0; i
< n_spills
; i
++)
2112 if (TEST_HARD_REG_BIT (reg_class_contents
[class], spill_regs
[i
])
2113 && ! TEST_HARD_REG_BIT (counted_for_groups
, spill_regs
[i
])
2114 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]))
2115 SET_HARD_REG_BIT (new, spill_regs
[i
]);
2117 /* Find each consecutive group of them. */
2118 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
&& max_groups
[class] > 0; i
++)
2119 if (TEST_HARD_REG_BIT (new, i
)
2120 && i
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
2121 && HARD_REGNO_MODE_OK (i
, group_mode
[class]))
2123 for (j
= 1; j
< group_size
[class]; j
++)
2124 if (! TEST_HARD_REG_BIT (new, i
+ j
))
2127 if (j
== group_size
[class])
2129 /* We found a group. Mark it off against this class's need for
2130 groups, and against each superclass too. */
2131 register enum reg_class
*p
;
2133 max_groups
[class]--;
2134 p
= reg_class_superclasses
[class];
2135 while (*p
!= LIM_REG_CLASSES
)
2137 if (group_size
[(int) *p
] <= group_size
[class])
2138 max_groups
[(int) *p
]--;
2142 /* Don't count these registers again. */
2143 for (j
= 0; j
< group_size
[class]; j
++)
2144 SET_HARD_REG_BIT (counted_for_groups
, i
+ j
);
2147 /* Skip to the last reg in this group. When i is incremented above,
2148 it will then point to the first reg of the next possible group. */
2153 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2154 another mode that needs to be reloaded for the same register class CLASS.
2155 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2156 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2158 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2159 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2160 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2161 causes unnecessary failures on machines requiring alignment of register
2162 groups when the two modes are different sizes, because the larger mode has
2163 more strict alignment rules than the smaller mode. */
2166 modes_equiv_for_class_p (allocate_mode
, other_mode
, class)
2167 enum machine_mode allocate_mode
, other_mode
;
2168 enum reg_class
class;
2171 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
2173 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], regno
)
2174 && HARD_REGNO_MODE_OK (regno
, allocate_mode
)
2175 && ! HARD_REGNO_MODE_OK (regno
, other_mode
))
2181 /* Handle the failure to find a register to spill.
2182 INSN should be one of the insns which needed this particular spill reg. */
2185 spill_failure (insn
)
2188 if (asm_noperands (PATTERN (insn
)) >= 0)
2189 error_for_asm (insn
, "`asm' needs too many reloads");
2191 fatal_insn ("Unable to find a register to spill.", insn
);
2194 /* Add a new register to the tables of available spill-registers
2195 (as well as spilling all pseudos allocated to the register).
2196 I is the index of this register in potential_reload_regs.
2197 CLASS is the regclass whose need is being satisfied.
2198 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2199 so that this register can count off against them.
2200 MAX_NONGROUPS is 0 if this register is part of a group.
2201 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2204 new_spill_reg (i
, class, max_needs
, max_nongroups
, global
, dumpfile
)
2212 register enum reg_class
*p
;
2214 int regno
= potential_reload_regs
[i
];
2216 if (i
>= FIRST_PSEUDO_REGISTER
)
2217 abort (); /* Caller failed to find any register. */
2219 if (fixed_regs
[regno
] || TEST_HARD_REG_BIT (forbidden_regs
, regno
))
2220 fatal ("fixed or forbidden register was spilled.\n\
2221 This may be due to a compiler bug or to impossible asm\n\
2222 statements or clauses.");
2224 /* Make reg REGNO an additional reload reg. */
2226 potential_reload_regs
[i
] = -1;
2227 spill_regs
[n_spills
] = regno
;
2228 spill_reg_order
[regno
] = n_spills
;
2230 fprintf (dumpfile
, "Spilling reg %d.\n", spill_regs
[n_spills
]);
2232 /* Clear off the needs we just satisfied. */
2235 p
= reg_class_superclasses
[class];
2236 while (*p
!= LIM_REG_CLASSES
)
2237 max_needs
[(int) *p
++]--;
2239 if (max_nongroups
&& max_nongroups
[class] > 0)
2241 SET_HARD_REG_BIT (counted_for_nongroups
, regno
);
2242 max_nongroups
[class]--;
2243 p
= reg_class_superclasses
[class];
2244 while (*p
!= LIM_REG_CLASSES
)
2245 max_nongroups
[(int) *p
++]--;
2248 /* Spill every pseudo reg that was allocated to this reg
2249 or to something that overlaps this reg. */
2251 val
= spill_hard_reg (spill_regs
[n_spills
], global
, dumpfile
, 0);
2253 /* If there are some registers still to eliminate and this register
2254 wasn't ever used before, additional stack space may have to be
2255 allocated to store this register. Thus, we may have changed the offset
2256 between the stack and frame pointers, so mark that something has changed.
2257 (If new pseudos were spilled, thus requiring more space, VAL would have
2258 been set non-zero by the call to spill_hard_reg above since additional
2259      reloads may be needed in that case.)
2261 One might think that we need only set VAL to 1 if this is a call-used
2262 register. However, the set of registers that must be saved by the
2263 prologue is not identical to the call-used set. For example, the
2264 register used by the call insn for the return PC is a call-used register,
2265 but must be saved by the prologue. */
2266 if (num_eliminable
&& ! regs_ever_live
[spill_regs
[n_spills
]])
2269 regs_ever_live
[spill_regs
[n_spills
]] = 1;
2275 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2276 data that is dead in INSN. */
2279 delete_dead_insn (insn
)
2282 rtx prev
= prev_real_insn (insn
);
2285   /* If the previous insn sets a register that dies in our insn, delete it too.  */
2287 if (prev
&& GET_CODE (PATTERN (prev
)) == SET
2288 && (prev_dest
= SET_DEST (PATTERN (prev
)), GET_CODE (prev_dest
) == REG
)
2289 && reg_mentioned_p (prev_dest
, PATTERN (insn
))
2290 && find_regno_note (insn
, REG_DEAD
, REGNO (prev_dest
)))
2291 delete_dead_insn (prev
);
2293 PUT_CODE (insn
, NOTE
);
2294 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2295 NOTE_SOURCE_FILE (insn
) = 0;
2298 /* Modify the home of pseudo-reg I.
2299 The new home is present in reg_renumber[I].
2301 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2302 or it may be -1, meaning there is none or it is not relevant.
2303 This is used so that all pseudos spilled from a given hard reg
2304 can share one stack slot. */
2307 alter_reg (i
, from_reg
)
2311 /* When outputting an inline function, this can happen
2312 for a reg that isn't actually used. */
2313 if (regno_reg_rtx
[i
] == 0)
2316 /* If the reg got changed to a MEM at rtl-generation time,
2318 if (GET_CODE (regno_reg_rtx
[i
]) != REG
)
2321 /* Modify the reg-rtx to contain the new hard reg
2322 number or else to contain its pseudo reg number. */
2323 REGNO (regno_reg_rtx
[i
])
2324 = reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
;
2326 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2327 allocate a stack slot for it. */
2329 if (reg_renumber
[i
] < 0
2330 && reg_n_refs
[i
] > 0
2331 && reg_equiv_constant
[i
] == 0
2332 && reg_equiv_memory_loc
[i
] == 0)
2335 int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2336 int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
2339 /* Each pseudo reg has an inherent size which comes from its own mode,
2340 and a total size which provides room for paradoxical subregs
2341 which refer to the pseudo reg in wider modes.
2343 We can use a slot already allocated if it provides both
2344 enough inherent space and enough total space.
2345 Otherwise, we allocate a new slot, making sure that it has no less
2346 	 inherent space, and no less total space, than the previous slot.  */
2349 /* No known place to spill from => no slot to reuse. */
2350 x
= assign_stack_local (GET_MODE (regno_reg_rtx
[i
]), total_size
, -1);
2351 if (BYTES_BIG_ENDIAN
)
2353 /* Cancel the big-endian correction done in assign_stack_local.
2354 Get the address of the beginning of the slot.
2355 	     This is so we can do a big-endian correction unconditionally below.  */
2357 adjust
= inherent_size
- total_size
;
2360 /* Reuse a stack slot if possible. */
2361 else if (spill_stack_slot
[from_reg
] != 0
2362 && spill_stack_slot_width
[from_reg
] >= total_size
2363 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2365 x
= spill_stack_slot
[from_reg
];
2366 /* Allocate a bigger slot. */
2369 /* Compute maximum size needed, both for inherent size
2370 and for total size. */
2371 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2373 if (spill_stack_slot
[from_reg
])
2375 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2377 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2378 if (spill_stack_slot_width
[from_reg
] > total_size
)
2379 total_size
= spill_stack_slot_width
[from_reg
];
2381 /* Make a slot with that size. */
2382 x
= assign_stack_local (mode
, total_size
, -1);
2384 if (BYTES_BIG_ENDIAN
)
2386 /* Cancel the big-endian correction done in assign_stack_local.
2387 Get the address of the beginning of the slot.
2388 	     This is so we can do a big-endian correction unconditionally below.  */
2390 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2393 stack_slot
= gen_rtx (MEM
, mode_for_size (total_size
2396 plus_constant (XEXP (x
, 0), adjust
));
2397 RTX_UNCHANGING_P (stack_slot
)
2398 = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2401 	  spill_stack_slot[from_reg] = stack_slot;
2402 	  spill_stack_slot_width[from_reg] = total_size;
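	  /* Recording the slot and its width lets later pseudos spilled from
	     the same hard reg reuse or enlarge this slot instead of getting
	     a separate one.  */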
2405 /* On a big endian machine, the "address" of the slot
2406 is the address of the low part that fits its inherent mode. */
2407 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2408 adjust
+= (total_size
- inherent_size
);
2410 /* If we have any adjustment to make, or if the stack slot is the
2411 wrong mode, make a new stack slot. */
2412 if (adjust
!= 0 || GET_MODE (x
) != GET_MODE (regno_reg_rtx
[i
]))
2414 x
= gen_rtx (MEM
, GET_MODE (regno_reg_rtx
[i
]),
2415 plus_constant (XEXP (x
, 0), adjust
));
2416 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2419 /* Save the stack slot for later. */
2420 reg_equiv_memory_loc
[i
] = x
;
2424 /* Mark the slots in regs_ever_live for the hard regs
2425 used by pseudo-reg number REGNO. */
2428 mark_home_live (regno
)
2431 register int i
, lim
;
2432 i
= reg_renumber
[regno
];
2435 lim
= i
+ HARD_REGNO_NREGS (i
, PSEUDO_REGNO_MODE (regno
));
2437 regs_ever_live
[i
++] = 1;
2440 /* Mark the registers used in SCRATCH as being live. */
2443 mark_scratch_live (scratch
)
2447 int regno
= REGNO (scratch
);
2448 int lim
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (scratch
));
2450 for (i
= regno
; i
< lim
; i
++)
2451 regs_ever_live
[i
] = 1;
2454 /* This function handles the tracking of elimination offsets around branches.
2456 X is a piece of RTL being scanned.
2458 INSN is the insn that it came from, if any.
2460 INITIAL_P is non-zero if we are to set the offset to be the initial
2461    offset and zero if we are setting the offset of the label to be the
   current offset.  */
2465 set_label_offsets (x
, insn
, initial_p
)
2470 enum rtx_code code
= GET_CODE (x
);
2473 struct elim_table
*p
;
2478 if (LABEL_REF_NONLOCAL_P (x
))
2483 /* ... fall through ... */
2486 /* If we know nothing about this label, set the desired offsets. Note
2487 that this sets the offset at a label to be the offset before a label
2488 if we don't know anything about the label. This is not correct for
2489 the label after a BARRIER, but is the best guess we can make. If
2490 we guessed wrong, we will suppress an elimination that might have
2491 been possible had we been able to guess correctly. */
2493 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
)])
2495 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2496 offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2497 = (initial_p
? reg_eliminate
[i
].initial_offset
2498 : reg_eliminate
[i
].offset
);
2499 offsets_known_at
[CODE_LABEL_NUMBER (x
)] = 1;
2502 /* Otherwise, if this is the definition of a label and it is
2503 preceded by a BARRIER, set our offsets to the known offset of
2507 && (tem
= prev_nonnote_insn (insn
)) != 0
2508 && GET_CODE (tem
) == BARRIER
)
2510 num_not_at_initial_offset
= 0;
2511 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2513 reg_eliminate
[i
].offset
= reg_eliminate
[i
].previous_offset
2514 = offsets_at
[CODE_LABEL_NUMBER (x
)][i
];
2515 if (reg_eliminate
[i
].can_eliminate
2516 && (reg_eliminate
[i
].offset
2517 != reg_eliminate
[i
].initial_offset
))
2518 num_not_at_initial_offset
++;
2523 /* If neither of the above cases is true, compare each offset
2524 with those previously recorded and suppress any eliminations
2525 where the offsets disagree. */
2527 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2528 if (offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2529 != (initial_p
? reg_eliminate
[i
].initial_offset
2530 : reg_eliminate
[i
].offset
))
2531 reg_eliminate
[i
].can_eliminate
= 0;
2536 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2538 /* ... fall through ... */
2542 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2543 and hence must have all eliminations at their initial offsets. */
2544 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2545 if (REG_NOTE_KIND (tem
) == REG_LABEL
)
2546 set_label_offsets (XEXP (tem
, 0), insn
, 1);
2551 /* Each of the labels in the address vector must be at their initial
2552 	 offsets.  We want the first field for ADDR_VEC and the second
2553 field for ADDR_DIFF_VEC. */
2555 for (i
= 0; i
< XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2556 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2561 /* We only care about setting PC. If the source is not RETURN,
2562 IF_THEN_ELSE, or a label, disable any eliminations not at
2563 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2564 isn't one of those possibilities. For branches to a label,
2565 call ourselves recursively.
2567 Note that this can disable elimination unnecessarily when we have
2568 a non-local goto since it will look like a non-constant jump to
2569 someplace in the current function. This isn't a significant
2570 problem since such jumps will normally be when all elimination
2571 pairs are back to their initial offsets. */
2573 if (SET_DEST (x
) != pc_rtx
)
2576 switch (GET_CODE (SET_SRC (x
)))
2583 set_label_offsets (XEXP (SET_SRC (x
), 0), insn
, initial_p
);
2587 tem
= XEXP (SET_SRC (x
), 1);
2588 if (GET_CODE (tem
) == LABEL_REF
)
2589 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2590 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2593 tem
= XEXP (SET_SRC (x
), 2);
2594 if (GET_CODE (tem
) == LABEL_REF
)
2595 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2596 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2601 /* If we reach here, all eliminations must be at their initial
2602 offset because we are doing a jump to a variable address. */
2603       for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2604 	if (p->offset != p->initial_offset)
2605 	  p->can_eliminate = 0;
2609 /* Used for communication between the next two function to properly share
2610 the vector for an ASM_OPERANDS. */
2612 static struct rtvec_def
*old_asm_operands_vec
, *new_asm_operands_vec
;
2614 /* Scan X and replace any eliminable registers (such as fp) with a
2615 replacement (such as sp), plus an offset.
2617 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2618 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2619 MEM, we are allowed to replace a sum of a register and the constant zero
2620 with the register, which we cannot do outside a MEM. In addition, we need
2621 to record the fact that a register is referenced outside a MEM.
2623 If INSN is an insn, it is the insn containing X. If we replace a REG
2624 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2625 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2626    the REG is being modified.
2628 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2629 That's used when we eliminate in expressions stored in notes.
2630 This means, do not set ref_outside_mem even if the reference
2633 If we see a modification to a register we know about, take the
2634 appropriate action (see case SET, below).
2636 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2637 replacements done assuming all offsets are at their initial values. If
2638 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2639 encounter, return the actual location so that find_reloads will do
2640 the proper thing. */
2643 eliminate_regs (x
, mem_mode
, insn
)
2645 enum machine_mode mem_mode
;
2648 enum rtx_code code
= GET_CODE (x
);
2649 struct elim_table
*ep
;
2674 /* First handle the case where we encounter a bare register that
2675 is eliminable. Replace it with a PLUS. */
2676 if (regno
< FIRST_PSEUDO_REGISTER
)
2678 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2680 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2683 /* Refs inside notes don't count for this purpose. */
2684 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2685 || GET_CODE (insn
) == INSN_LIST
)))
2686 ep
->ref_outside_mem
= 1;
2687 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2691 else if (reg_equiv_memory_loc
&& reg_equiv_memory_loc
[regno
]
2692 && (reg_equiv_address
[regno
] || num_not_at_initial_offset
))
2694 /* In this case, find_reloads would attempt to either use an
2695 incorrect address (if something is not at its initial offset)
2696 	     or substitute a replaced address into an insn (which loses
2697 if the offset is changed by some later action). So we simply
2698 return the replaced stack slot (assuming it is changed by
2699 elimination) and ignore the fact that this is actually a
2700 reference to the pseudo. Ensure we make a copy of the
2701 address in case it is shared. */
2702 new = eliminate_regs (reg_equiv_memory_loc
[regno
],
2704 if (new != reg_equiv_memory_loc
[regno
])
2706 cannot_omit_stores
[regno
] = 1;
2707 return copy_rtx (new);
2713 /* If this is the sum of an eliminable register and a constant, rework
2715 if (GET_CODE (XEXP (x
, 0)) == REG
2716 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2717 && CONSTANT_P (XEXP (x
, 1)))
2719 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2721 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2724 /* Refs inside notes don't count for this purpose. */
2725 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2726 || GET_CODE (insn
) == INSN_LIST
)))
2727 ep
->ref_outside_mem
= 1;
2729 /* The only time we want to replace a PLUS with a REG (this
2730 occurs when the constant operand of the PLUS is the negative
2731 of the offset) is when we are inside a MEM. We won't want
2732 to do so at other times because that would change the
2733 structure of the insn in a way that reload can't handle.
2734 We special-case the commonest situation in
2735 eliminate_regs_in_insn, so just replace a PLUS with a
2736 PLUS here, unless inside a MEM. */
2737 if (mem_mode
!= 0 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2738 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2741 return gen_rtx (PLUS
, Pmode
, ep
->to_rtx
,
2742 plus_constant (XEXP (x
, 1),
2743 ep
->previous_offset
));
2746 /* If the register is not eliminable, we are done since the other
2747 operand is a constant. */
2751 /* If this is part of an address, we want to bring any constant to the
2752 outermost PLUS. We will do this by doing register replacement in
2753 our operands and seeing if a constant shows up in one of them.
2755 We assume here this is part of an address (or a "load address" insn)
2756 since an eliminable register is not likely to appear in any other
2759 If we have (plus (eliminable) (reg)), we want to produce
2760 	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2761 normal add insn, (plus (replacement) (reg)) will be pushed as a
2762 reload. This is the desired action. */
2765 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2766 rtx new1
= eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2768 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2770 /* If one side is a PLUS and the other side is a pseudo that
2771 didn't get a hard register but has a reg_equiv_constant,
2772 we must replace the constant here since it may no longer
2773 be in the position of any operand. */
2774 if (GET_CODE (new0
) == PLUS
&& GET_CODE (new1
) == REG
2775 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2776 && reg_renumber
[REGNO (new1
)] < 0
2777 && reg_equiv_constant
!= 0
2778 && reg_equiv_constant
[REGNO (new1
)] != 0)
2779 new1
= reg_equiv_constant
[REGNO (new1
)];
2780 else if (GET_CODE (new1
) == PLUS
&& GET_CODE (new0
) == REG
2781 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2782 && reg_renumber
[REGNO (new0
)] < 0
2783 && reg_equiv_constant
[REGNO (new0
)] != 0)
2784 new0
= reg_equiv_constant
[REGNO (new0
)];
2786 new = form_sum (new0
, new1
);
2788 /* As above, if we are not inside a MEM we do not want to
2789 turn a PLUS into something else. We might try to do so here
2790 for an addition of 0 if we aren't optimizing. */
2791 if (! mem_mode
&& GET_CODE (new) != PLUS
)
2792 return gen_rtx (PLUS
, GET_MODE (x
), new, const0_rtx
);
2800 /* If this is the product of an eliminable register and a
2801 constant, apply the distribute law and move the constant out
2802 so that we have (plus (mult ..) ..). This is needed in order
2803 	 to keep load-address insns valid.  This case is pathological.
2804 We ignore the possibility of overflow here. */
2805 if (GET_CODE (XEXP (x
, 0)) == REG
2806 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2807 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2808 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2810 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2813 /* Refs inside notes don't count for this purpose. */
2814 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2815 || GET_CODE (insn
) == INSN_LIST
)))
2816 ep
->ref_outside_mem
= 1;
2819 	      plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2820 			     ep->previous_offset * INTVAL (XEXP (x, 1)));
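	  /* Since FROM is being replaced by TO plus a constant offset,
	     (mult FROM c) becomes (plus (mult TO c) (offset * c)) by the
	     distributive law, keeping the constant at the outermost PLUS.  */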
2823 /* ... fall through ... */
2828 case DIV
: case UDIV
:
2829 case MOD
: case UMOD
:
2830 case AND
: case IOR
: case XOR
:
2831 case ROTATERT
: case ROTATE
:
2832 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2834 case GE
: case GT
: case GEU
: case GTU
:
2835 case LE
: case LT
: case LEU
: case LTU
:
2837 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2839 = XEXP (x
, 1) ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
) : 0;
2841 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2842 return gen_rtx (code
, GET_MODE (x
), new0
, new1
);
2847 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2850 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2851 if (new != XEXP (x
, 0))
2852 x
= gen_rtx (EXPR_LIST
, REG_NOTE_KIND (x
), new, XEXP (x
, 1));
2855 /* ... fall through ... */
2858 /* Now do eliminations in the rest of the chain. If this was
2859 an EXPR_LIST, this might result in allocating more memory than is
2860 strictly needed, but it simplifies the code. */
2863 new = eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2864 if (new != XEXP (x
, 1))
2865 return gen_rtx (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new);
2873       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2874 if (ep
->to_rtx
== XEXP (x
, 0))
2876 int size
= GET_MODE_SIZE (mem_mode
);
2878 /* If more bytes than MEM_MODE are pushed, account for them. */
2879 #ifdef PUSH_ROUNDING
2880 if (ep
->to_rtx
== stack_pointer_rtx
)
2881 size
= PUSH_ROUNDING (size
);
2883 if (code
== PRE_DEC
|| code
== POST_DEC
)
2889 /* Fall through to generic unary operation case. */
2891 case STRICT_LOW_PART
:
2893 case SIGN_EXTEND
: case ZERO_EXTEND
:
2894 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2895 case FLOAT
: case FIX
:
2896 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2900 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2901 if (new != XEXP (x
, 0))
2902 return gen_rtx (code
, GET_MODE (x
), new);
2906 /* Similar to above processing, but preserve SUBREG_WORD.
2907 Convert (subreg (mem)) to (mem) if not paradoxical.
2908 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2909 pseudo didn't get a hard reg, we must replace this with the
2910 eliminated version of the memory location because push_reloads
2911 may do the replacement in certain circumstances. */
2912 if (GET_CODE (SUBREG_REG (x
)) == REG
2913 && (GET_MODE_SIZE (GET_MODE (x
))
2914 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2915 && reg_equiv_memory_loc
!= 0
2916 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2918 new = eliminate_regs (reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))],
2921 /* If we didn't change anything, we must retain the pseudo. */
2922 if (new == reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))])
2923 new = SUBREG_REG (x
);
2926 /* Otherwise, ensure NEW isn't shared in case we have to reload
2928 new = copy_rtx (new);
2930 /* In this case, we must show that the pseudo is used in this
2931 insn so that delete_output_reload will do the right thing. */
2932 if (insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
2933 && GET_CODE (insn
) != INSN_LIST
)
2934 emit_insn_before (gen_rtx (USE
, VOIDmode
, SUBREG_REG (x
)),
2939 new = eliminate_regs (SUBREG_REG (x
), mem_mode
, insn
);
2941 if (new != XEXP (x
, 0))
2943 if (GET_CODE (new) == MEM
2944 && (GET_MODE_SIZE (GET_MODE (x
))
2945 <= GET_MODE_SIZE (GET_MODE (new)))
2946 #ifdef LOAD_EXTEND_OP
2947 /* On these machines we will be reloading what is
2948 inside the SUBREG if it originally was a pseudo and
2949 the inner and outer modes are both a word or
2950 smaller. So leave the SUBREG then. */
2951 && ! (GET_CODE (SUBREG_REG (x
)) == REG
2952 && GET_MODE_SIZE (GET_MODE (x
)) <= UNITS_PER_WORD
2953 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2954 && (GET_MODE_SIZE (GET_MODE (x
))
2955 > GET_MODE_SIZE (GET_MODE (new)))
2956 && INTEGRAL_MODE_P (GET_MODE (new))
2957 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL
)
2961 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2962 enum machine_mode mode
= GET_MODE (x
);
2964 if (BYTES_BIG_ENDIAN
)
2965 offset
+= (MIN (UNITS_PER_WORD
,
2966 GET_MODE_SIZE (GET_MODE (new)))
2967 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2969 PUT_MODE (new, mode
);
2970 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset
);
2974 return gen_rtx (SUBREG
, GET_MODE (x
), new, SUBREG_WORD (x
));
2980 /* If clobbering a register that is the replacement register for an
2981 elimination we still think can be performed, note that it cannot
2982 be performed. Otherwise, we need not be concerned about it. */
2983       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2984 	if (ep->to_rtx == XEXP (x, 0))
2985 	  ep->can_eliminate = 0;
2987 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2988 if (new != XEXP (x
, 0))
2989 return gen_rtx (code
, GET_MODE (x
), new);
2995 /* Properly handle sharing input and constraint vectors. */
2996 if (ASM_OPERANDS_INPUT_VEC (x
) != old_asm_operands_vec
)
2998 /* When we come to a new vector not seen before,
2999 scan all its elements; keep the old vector if none
3000 of them changes; otherwise, make a copy. */
3001 old_asm_operands_vec
= ASM_OPERANDS_INPUT_VEC (x
);
3002 temp_vec
= (rtx
*) alloca (XVECLEN (x
, 3) * sizeof (rtx
));
3003 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
3004 temp_vec
[i
] = eliminate_regs (ASM_OPERANDS_INPUT (x
, i
),
3007 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
3008 if (temp_vec
[i
] != ASM_OPERANDS_INPUT (x
, i
))
3011 if (i
== ASM_OPERANDS_INPUT_LENGTH (x
))
3012 new_asm_operands_vec
= old_asm_operands_vec
;
3014 new_asm_operands_vec
3015 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x
), temp_vec
);
3018 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3019 if (new_asm_operands_vec
== old_asm_operands_vec
)
3022 new = gen_rtx (ASM_OPERANDS
, VOIDmode
, ASM_OPERANDS_TEMPLATE (x
),
3023 ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
3024 ASM_OPERANDS_OUTPUT_IDX (x
), new_asm_operands_vec
,
3025 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x
),
3026 ASM_OPERANDS_SOURCE_FILE (x
),
3027 ASM_OPERANDS_SOURCE_LINE (x
));
3028 new->volatil
= x
->volatil
;
3033 /* Check for setting a register that we know about. */
3034 if (GET_CODE (SET_DEST (x
)) == REG
)
3036 /* See if this is setting the replacement register for an
3039 If DEST is the hard frame pointer, we do nothing because we
3040 assume that all assignments to the frame pointer are for
3041 non-local gotos and are being done at a time when they are valid
3042 and do not disturb anything else. Some machines want to
3043 eliminate a fake argument pointer (or even a fake frame pointer)
3044 with either the real frame or the stack pointer. Assignments to
3045 the hard frame pointer must not prevent this elimination. */
3047 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3049 if (ep
->to_rtx
== SET_DEST (x
)
3050 && SET_DEST (x
) != hard_frame_pointer_rtx
)
3052 /* If it is being incremented, adjust the offset. Otherwise,
3053 this elimination can't be done. */
3054 rtx src
= SET_SRC (x
);
3056 if (GET_CODE (src
) == PLUS
3057 && XEXP (src
, 0) == SET_DEST (x
)
3058 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
3059 ep
->offset
-= INTVAL (XEXP (src
, 1));
3061 ep
->can_eliminate
= 0;
3064 /* Now check to see we are assigning to a register that can be
3065 eliminated. If so, it must be as part of a PARALLEL, since we
3066 will not have been called if this is a single SET. So indicate
3067 that we can no longer eliminate this reg. */
3068       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3070 	if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3071 	  ep->can_eliminate = 0;
3074 /* Now avoid the loop below in this common case. */
3076 rtx new0
= eliminate_regs (SET_DEST (x
), 0, insn
);
3077 rtx new1
= eliminate_regs (SET_SRC (x
), 0, insn
);
3079 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3080 write a CLOBBER insn. */
3081 if (GET_CODE (SET_DEST (x
)) == REG
&& GET_CODE (new0
) == MEM
3082 && insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
3083 && GET_CODE (insn
) != INSN_LIST
)
3084 emit_insn_after (gen_rtx (CLOBBER
, VOIDmode
, SET_DEST (x
)), insn
);
3086 if (new0
!= SET_DEST (x
) || new1
!= SET_SRC (x
))
3087 return gen_rtx (SET
, VOIDmode
, new0
, new1
);
3093 /* Our only special processing is to pass the mode of the MEM to our
3094 recursive call and copy the flags. While we are here, handle this
3095 case more efficiently. */
3096 new = eliminate_regs (XEXP (x
, 0), GET_MODE (x
), insn
);
3097 if (new != XEXP (x
, 0))
3099 new = gen_rtx (MEM
, GET_MODE (x
), new);
3100 new->volatil
= x
->volatil
;
3101 new->unchanging
= x
->unchanging
;
3102 new->in_struct
= x
->in_struct
;
3109 /* Process each of our operands recursively. If any have changed, make a
3111 fmt
= GET_RTX_FORMAT (code
);
3112 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3116 new = eliminate_regs (XEXP (x
, i
), mem_mode
, insn
);
3117 if (new != XEXP (x
, i
) && ! copied
)
3119 rtx new_x
= rtx_alloc (code
);
3120 bcopy ((char *) x
, (char *) new_x
,
3121 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3122 + sizeof (new_x
->fld
[0]) * GET_RTX_LENGTH (code
)));
3128 else if (*fmt
== 'E')
3131 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3133 new = eliminate_regs (XVECEXP (x
, i
, j
), mem_mode
, insn
);
3134 if (new != XVECEXP (x
, i
, j
) && ! copied_vec
)
3136 rtvec new_v
= gen_rtvec_v (XVECLEN (x
, i
),
3137 &XVECEXP (x
, i
, 0));
3140 rtx new_x
= rtx_alloc (code
);
3141 bcopy ((char *) x
, (char *) new_x
,
3142 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3143 + (sizeof (new_x
->fld
[0])
3144 * GET_RTX_LENGTH (code
))));
3148 XVEC (x
, i
) = new_v
;
3151 XVECEXP (x
, i
, j
) = new;
3159 /* Scan INSN and eliminate all eliminable registers in it.
3161 If REPLACE is nonzero, do the replacement destructively. Also
3162    delete the insn as dead if it is setting an eliminable register.
3164 If REPLACE is zero, do all our allocations in reload_obstack.
3166 If no eliminations were done and this insn doesn't require any elimination
3167 processing (these are not identical conditions: it might be updating sp,
3168 but not referencing fp; this needs to be seen during reload_as_needed so
3169 that the offset between fp and sp can be taken into consideration), zero
3170 is returned. Otherwise, 1 is returned. */
3173 eliminate_regs_in_insn (insn
, replace
)
3177 rtx old_body
= PATTERN (insn
);
3178 rtx old_set
= single_set (insn
);
3181 struct elim_table
*ep
;
3184 push_obstacks (&reload_obstack
, &reload_obstack
);
3186 if (old_set
!= 0 && GET_CODE (SET_DEST (old_set
)) == REG
3187 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3189 /* Check for setting an eliminable register. */
3190       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3191 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3193 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3194 /* If this is setting the frame pointer register to the
3195 hardware frame pointer register and this is an elimination
3196 that will be done (tested above), this insn is really
3197 adjusting the frame pointer downward to compensate for
3198 the adjustment done before a nonlocal goto. */
3199 if (ep
->from
== FRAME_POINTER_REGNUM
3200 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3202 rtx src
= SET_SRC (old_set
);
3205 if (src
== ep
->to_rtx
)
3207 else if (GET_CODE (src
) == PLUS
3208 && GET_CODE (XEXP (src
, 0)) == CONST_INT
)
3209 offset
= INTVAL (XEXP (src
, 0)), ok
= 1;
3216 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
3218 /* First see if this insn remains valid when we
3219 make the change. If not, keep the INSN_CODE
3220 the same and let reload fit it up. */
3221 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3222 validate_change (insn
, &SET_DEST (old_set
),
3224 if (! apply_change_group ())
3226 SET_SRC (old_set
) = src
;
3227 SET_DEST (old_set
) = ep
->to_rtx
;
3237 /* In this case this insn isn't serving a useful purpose. We
3238 will delete it in reload_as_needed once we know that this
3239 elimination is, in fact, being done.
3241 	     If REPLACE isn't set, we can't delete this insn, but needn't
3242 process it since it won't be used unless something changes. */
3244 delete_dead_insn (insn
);
3249 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3250 in the insn is the negative of the offset in FROM. Substitute
3251 (set (reg) (reg to)) for the insn and change its code.
3253      We have to do this here, rather than in eliminate_regs, so that we can
3254 change the insn code. */
3256 if (GET_CODE (SET_SRC (old_set
)) == PLUS
3257 && GET_CODE (XEXP (SET_SRC (old_set
), 0)) == REG
3258 && GET_CODE (XEXP (SET_SRC (old_set
), 1)) == CONST_INT
)
3259     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3261 if (ep
->from_rtx
== XEXP (SET_SRC (old_set
), 0)
3262 && ep
->can_eliminate
)
3264 /* We must stop at the first elimination that will be used.
3265 If this one would replace the PLUS with a REG, do it
3266 now. Otherwise, quit the loop and let eliminate_regs
3267 do its normal replacement. */
3268 if (ep
->offset
== - INTVAL (XEXP (SET_SRC (old_set
), 1)))
3270 /* We assume here that we don't need a PARALLEL of
3271 any CLOBBERs for this assignment. There's not
3272 much we can do if we do need it. */
3273 		PATTERN (insn) = gen_rtx (SET, VOIDmode,
3274 					  SET_DEST (old_set), ep->to_rtx);
3275 		INSN_CODE (insn) = -1;
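		/* Setting INSN_CODE to -1 marks the pattern as not yet
		   recognized, so it will be re-recognized in its new,
		   simpler form.  */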
3284 old_asm_operands_vec
= 0;
3286 /* Replace the body of this insn with a substituted form. If we changed
3287 something, return non-zero.
3289 If we are replacing a body that was a (set X (plus Y Z)), try to
3290 re-recognize the insn. We do this in case we had a simple addition
3291 but now can do this as a load-address. This saves an insn in this
3294 new_body
= eliminate_regs (old_body
, 0, replace
? insn
: NULL_RTX
);
3295 if (new_body
!= old_body
)
3297 /* If we aren't replacing things permanently and we changed something,
3298 make another copy to ensure that all the RTL is new. Otherwise
3299      things can go wrong if find_reloads swaps commutative operands
3300 and one is inside RTL that has been copied while the other is not. */
3302 /* Don't copy an asm_operands because (1) there's no need and (2)
3303 copy_rtx can't do it properly when there are multiple outputs. */
3304 if (! replace
&& asm_noperands (old_body
) < 0)
3305 new_body
= copy_rtx (new_body
);
3307 /* If we had a move insn but now we don't, rerecognize it. This will
3308 cause spurious re-recognition if the old move had a PARALLEL since
3309 the new one still will, but we can't call single_set without
3310 having put NEW_BODY into the insn and the re-recognition won't
3311 hurt in this rare case. */
3313 && ((GET_CODE (SET_SRC (old_set
)) == REG
3314 && (GET_CODE (new_body
) != SET
3315 || GET_CODE (SET_SRC (new_body
)) != REG
))
3316 /* If this was a load from or store to memory, compare
3317 the MEM in recog_operand to the one in the insn. If they
3318 are not equal, then rerecognize the insn. */
3320 && ((GET_CODE (SET_SRC (old_set
)) == MEM
3321 && SET_SRC (old_set
) != recog_operand
[1])
3322 || (GET_CODE (SET_DEST (old_set
)) == MEM
3323 && SET_DEST (old_set
) != recog_operand
[0])))
3324 /* If this was an add insn before, rerecognize. */
3325 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3327 if (! validate_change (insn
, &PATTERN (insn
), new_body
, 0))
3328 /* If recognition fails, store the new body anyway.
3329 It's normal to have recognition failures here
3330 due to bizarre memory addresses; reloading will fix them. */
3331 PATTERN (insn
) = new_body
;
3334 PATTERN (insn
) = new_body
;
3339 /* Loop through all elimination pairs. See if any have changed and
3340 recalculate the number not at initial offset.
3342 Compute the maximum offset (minimum offset if the stack does not
3343 grow downward) for each elimination pair.
3345    We also detect cases where register elimination cannot be done,
3346 namely, if a register would be both changed and referenced outside a MEM
3347 in the resulting insn since such an insn is often undefined and, even if
3348 not, we cannot know what meaning will be given to it. Note that it is
3349 valid to have a register used in an address in an insn that changes it
3350 (presumably with a pre- or post-increment or decrement).
3352 If anything changes, return nonzero. */
3354 num_not_at_initial_offset
= 0;
3355   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3357 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3358 ep
->can_eliminate
= 0;
3360 ep
->ref_outside_mem
= 0;
3362 if (ep
->previous_offset
!= ep
->offset
)
3365 ep
->previous_offset
= ep
->offset
;
3366 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3367 num_not_at_initial_offset
++;
3369 #ifdef STACK_GROWS_DOWNWARD
3370 ep
->max_offset
= MAX (ep
->max_offset
, ep
->offset
);
3372 ep
->max_offset
= MIN (ep
->max_offset
, ep
->offset
);
3377   /* If we changed something, perform elimination in REG_NOTES.  This is
3378 needed even when REPLACE is zero because a REG_DEAD note might refer
3379 to a register that we eliminate and could cause a different number
3380      of spill registers to be needed in the final reload pass than in the pre-pass.  */
3382 if (val
&& REG_NOTES (insn
) != 0)
3383 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
3391 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3392 replacement we currently believe is valid, mark it as not eliminable if X
3393 modifies DEST in any way other than by adding a constant integer to it.
3395 If DEST is the frame pointer, we do nothing because we assume that
3396 all assignments to the hard frame pointer are nonlocal gotos and are being
3397 done at a time when they are valid and do not disturb anything else.
3398 Some machines want to eliminate a fake argument pointer with either the
3399 frame or stack pointer. Assignments to the hard frame pointer must not
3400 prevent this elimination.
3402 Called via note_stores from reload before starting its passes to scan
3403 the insns of the function. */
3406 mark_not_eliminable (dest
, x
)
3412 /* A SUBREG of a hard register here is just changing its mode. We should
3413 not see a SUBREG of an eliminable hard register, but check just in
3415 if (GET_CODE (dest
) == SUBREG
)
3416 dest
= SUBREG_REG (dest
);
3418 if (dest
== hard_frame_pointer_rtx
)
3421 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3422 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3423 && (GET_CODE (x
) != SET
3424 || GET_CODE (SET_SRC (x
)) != PLUS
3425 || XEXP (SET_SRC (x
), 0) != dest
3426 || GET_CODE (XEXP (SET_SRC (x
), 1)) != CONST_INT
))
3428 reg_eliminate
[i
].can_eliminate_previous
3429 = reg_eliminate
[i
].can_eliminate
= 0;
3434 /* Kick all pseudos out of hard register REGNO.
3435 If GLOBAL is nonzero, try to find someplace else to put them.
3436 If DUMPFILE is nonzero, log actions taken on that file.
3438 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3439    because we found we can't eliminate some register.  In this case, no pseudos
3440 are allowed to be in the register, even if they are only in a block that
3441 doesn't require spill registers, unlike the case when we are spilling this
3442 hard reg to produce another spill register.
3444 Return nonzero if any pseudos needed to be kicked out. */
static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
        && reg_renumber[i] <= regno
        && (reg_renumber[i]
            + HARD_REGNO_NREGS (reg_renumber[i],
                                PSEUDO_REGNO_MODE (i))
            > regno))
      {
        /* If this register belongs solely to a basic block which needed no
           spilling of any class that this register is contained in,
           leave it be, unless we are spilling this register because
           it was a hard register that can't be eliminated.  */

        if (! cant_eliminate
            && basic_block_needs[0]
            && reg_basic_block[i] >= 0
            && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
          {
            enum reg_class *p;

            for (p = reg_class_superclasses[(int) class];
                 *p != LIM_REG_CLASSES; p++)
              if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
                break;

            if (*p == LIM_REG_CLASSES)
              continue;
          }

        /* Mark it as no longer having a hard register home.  */
        reg_renumber[i] = -1;
        /* We will need to scan everything again.  */
        something_changed = 1;
        if (global)
          retry_global_alloc (i, forbidden_regs);

        alter_reg (i, regno);
        if (dumpfile)
          {
            if (reg_renumber[i] == -1)
              fprintf (dumpfile, " Register %d now on stack.\n\n", i);
            else
              fprintf (dumpfile, " Register %d now in %d.\n\n",
                       i, reg_renumber[i]);
          }
      }

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
      {
        if (! cant_eliminate && basic_block_needs[0]
            && ! basic_block_needs[(int) class][scratch_block[i]])
          {
            enum reg_class *p;

            for (p = reg_class_superclasses[(int) class];
                 *p != LIM_REG_CLASSES; p++)
              if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
                break;

            if (*p == LIM_REG_CLASSES)
              continue;
          }

        PUT_CODE (scratch_list[i], SCRATCH);
        scratch_list[i] = 0;
        something_changed = 1;
      }

  return something_changed;
}
/* Find all paradoxical subregs within X and update reg_max_ref_width.
   Also mark any hard registers used to store user variables as
   forbidden from being used for spill registers.  */
static void
scan_paradoxical_subregs (x)
     register rtx x;
{
  register int i;
  register char *fmt;
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
#ifdef SMALL_REGISTER_CLASSES
      if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
        SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
#endif
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG
          && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
        reg_max_ref_width[REGNO (SUBREG_REG (x))]
          = GET_MODE_SIZE (GET_MODE (x));
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        scan_paradoxical_subregs (XEXP (x, i));
      else if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            scan_paradoxical_subregs (XVECEXP (x, i, j));
        }
    }
}
static int
hard_reg_use_compare (p1, p2)
     struct hard_reg_n_uses *p1, *p2;
{
  int tem = p1->uses - p2->uses;
  if (tem != 0) return tem;
  /* If regs are equally good, sort by regno,
     so that the results of qsort leave nothing to chance.  */
  return p1->regno - p2->regno;
}
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
        {
          int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
          while (regno < lim)
            hard_reg_n_uses[regno++].uses += reg_n_refs[i];
        }
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
        {
          hard_reg_n_uses[i].uses += 2 * large + 2;
          SET_HARD_REG_BIT (bad_spill_regs, i);
        }
      else if (regs_explicitly_used[i])
        {
          hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
          /* ??? We are doing this here because of the potential that
             bad code may be generated if a register explicitly used in
             an insn was used as a spill register for that insn.  But
             not using these as spill registers may lose on some machines.
             We'll have to see how this works out.  */
          SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
        }
    }

  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
        potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
      potential_reload_regs[o++] = i;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
      potential_reload_regs[o++] = i;
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
         sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
/* Used in reload_as_needed to sort the spilled regs.  */

static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  return *r1 - *r2;
}
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */
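/* In outline, for each insn that needs reloads the loop below calls
   find_reloads to record what is needed, choose_reload_regs to pick the
   hard registers, and emit_reload_insns to emit the copy insns, and then
   substitutes the chosen registers into the insn body.  */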
static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;
  rtx after_call = 0;

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
                                  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
        = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
  for (i = 0; i < n_spills; i++)
    spill_reg_order[spill_regs[i]] = i;
  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
          && insn == basic_block_head[this_block + 1])
        ++this_block;

      /* If we pass a label, copy the offsets from the label information
         into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
        {
          num_not_at_initial_offset = 0;
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
            {
              reg_eliminate[i].offset = reg_eliminate[i].previous_offset
                = offsets_at[CODE_LABEL_NUMBER (insn)][i];
              if (reg_eliminate[i].can_eliminate
                  && (reg_eliminate[i].offset
                      != reg_eliminate[i].initial_offset))
                num_not_at_initial_offset++;
            }
        }

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
          /* Set avoid_return_reg if this is an insn
             that might use the value of a function call.  */
          if (GET_CODE (insn) == CALL_INSN)
            {
              if (GET_CODE (PATTERN (insn)) == SET)
                after_call = SET_DEST (PATTERN (insn));
              else if (GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                after_call = 0;
            }
          else if (after_call != 0
                   && !(GET_CODE (PATTERN (insn)) == SET
                        && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
            {
              if (reg_referenced_p (after_call, PATTERN (insn)))
                avoid_return_reg = after_call;
              after_call = 0;
            }
#endif /* SMALL_REGISTER_CLASSES */
          /* If this is a USE or CLOBBER of a MEM, ensure that any
             references to eliminable registers have been removed.  */

          if ((GET_CODE (PATTERN (insn)) == USE
               || GET_CODE (PATTERN (insn)) == CLOBBER)
              && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
            XEXP (XEXP (PATTERN (insn), 0), 0)
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
                                GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
          /* If we need to do register elimination processing, do so.
             This might delete the insn, in which case we are done.  */
          if (num_eliminable && GET_MODE (insn) == QImode)
            {
              eliminate_regs_in_insn (insn, 1);
              if (GET_CODE (insn) == NOTE)
                {
                  insn = next;
                  continue;
                }
            }

          if (GET_MODE (insn) == VOIDmode)
            n_reloads = 0;
          /* First find the pseudo regs that must be reloaded for this insn.
             This info is returned in the tables reload_... (see reload.h).
             Also modify the body of INSN by substituting RELOAD
             rtx's for those pseudo regs.  */
          else
            {
              bzero (reg_has_output_reload, max_regno);
              CLEAR_HARD_REG_SET (reg_is_output_reload);

              find_reloads (insn, 1, spill_indirect_levels, live_known,
                            spill_reg_order);
            }

          if (n_reloads > 0)
            {
              rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
              register rtx p;
              int class;

              /* If this block has not had spilling done for a
                 particular class and we have any non-optionals that need a
                 spill reg in that class, abort.  */

              for (class = 0; class < N_REG_CLASSES; class++)
                if (basic_block_needs[class] != 0
                    && basic_block_needs[class][this_block] == 0)
                  for (i = 0; i < n_reloads; i++)
                    if (class == (int) reload_reg_class[i]
                        && reload_reg_rtx[i] == 0
                        && ! reload_optional[i]
                        && (reload_in[i] != 0 || reload_out[i] != 0
                            || reload_secondary_p[i] != 0))
                      fatal_insn ("Non-optional registers need a spill register", insn);
              /* Now compute which reload regs to reload them into.  Perhaps
                 reusing reload regs from previous insns, or else output
                 load insns to reload them.  Maybe output store insns too.
                 Record the choices of reload reg in reload_reg_rtx.  */
              choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
              /* Merge any reloads that we didn't combine for fear of
                 increasing the number of spill registers needed but now
                 discover can be safely merged.  */
              merge_assigned_reloads (insn);
#endif

              /* Generate the insns to reload operands into or out of
                 their reload regs.  */
              emit_reload_insns (insn);

              /* Substitute the chosen reload regs from reload_reg_rtx
                 into the insn's body (or perhaps into the bodies of other
                 load and store insns that we just made for reloading
                 and that we moved the structure into).  */
              subst_reloads ();

              /* If this was an ASM, make sure that all the reload insns
                 we have generated are valid.  If not, give an error
                 and delete them.  */

              if (asm_noperands (PATTERN (insn)) >= 0)
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
                  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
                      && (recog_memoized (p) < 0
                          || (insn_extract (p),
                              ! constrain_operands (INSN_CODE (p), 1))))
                    {
                      error_for_asm (insn,
                                     "`asm' operand requires impossible reload");
                      PUT_CODE (p, NOTE);
                      NOTE_SOURCE_FILE (p) = 0;
                      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
                    }
            }
          /* Any previously reloaded spilled pseudo reg, stored in this insn,
             is no longer validly lying around to save a future reload.
             Note that this does not detect pseudos that were reloaded
             for this insn in order to be stored into it
             (obeying register constraints).  That is correct; such reload
             registers ARE still valid.  */
          note_stores (PATTERN (insn), forget_old_reloads_1);

          /* There may have been CLOBBER insns placed after INSN.  So scan
             between INSN and NEXT and use them to forget old reloads.  */
          for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
            if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
              note_stores (PATTERN (x), forget_old_reloads_1);
          /* Likewise for regs altered by auto-increment in this insn.
             But note that the reg-notes are not changed by reloading:
             they still contain the pseudo-regs, not the spill regs.  */
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
            if (REG_NOTE_KIND (x) == REG_INC)
              {
                /* See if this pseudo reg was reloaded in this insn.
                   If so, its last-reload info is still valid
                   because it is based on this insn's reload.  */
                for (i = 0; i < n_reloads; i++)
                  if (reload_out[i] == XEXP (x, 0))
                    break;

                if (i == n_reloads)
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
              }
        }
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
        for (i = 0; i < n_spills; i++)
          {
            reg_reloaded_contents[i] = -1;
            reg_reloaded_insn[i] = 0;
          }

      /* Don't assume a reload reg is still good after a call insn
         if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
        for (i = 0; i < n_spills; i++)
          if (call_used_regs[spill_regs[i]])
            {
              reg_reloaded_contents[i] = -1;
              reg_reloaded_insn[i] = 0;
            }

      /* In case registers overlap, allow certain insns to invalidate
         particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0; i < n_spills; i++)
        if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
          {
            reg_reloaded_contents[i] = -1;
            reg_reloaded_insn[i] = 0;
          }
#endif

      insn = next;
    }
}
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.  */

static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored;
{
  register int regno;
  int nr;
  int offset = 0;

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
         This can happen if a block-local pseudo is allocated to that reg
         and it wasn't spilled because this block's total need is 0.
         Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
        if (spill_reg_order[regno + i] >= 0
            /* But don't do this if the reg actually serves as an output
               reload reg in the current instruction.  */
            && (n_reloads == 0
                || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
          {
            reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
            reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
          }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];

/* Comparison function for qsort to decide which of two reloads
   should be handled first.  *P1 and *P2 are the reload numbers.  */
static int
reload_reg_class_lower (p1, p2)
     short *p1, *p2;
{
  register int r1 = *p1, r2 = *p2;
  register int t;

  /* Consider required reloads before optional ones.  */
  t = reload_optional[r1] - reload_optional[r2];
  if (t != 0)
    return t;

  /* Count all solitary classes before non-solitary ones.  */
  t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
       - (reg_class_size[(int) reload_reg_class[r1]] == 1));
  if (t != 0)
    return t;

  /* Aside from solitaires, consider all multi-reg groups first.  */
  t = reload_nregs[r2] - reload_nregs[r1];
  if (t != 0)
    return t;

  /* Consider reloads in order of increasing reg-class number.  */
  t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
  if (t != 0)
    return t;

  /* If reloads are equally urgent, sort by reload number,
     so that the results of qsort leave nothing to chance.  */
  return r1 - r2;
}
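/* For example, given one required reload in a single-register class and one
   optional reload in GENERAL_REGS, the required solitary-class reload sorts
   first: it has no alternative registers, so it must claim its register
   before the more flexible reload can take it.  */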
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
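/* Taken together, these sets model the insn as a fixed sequence of events:
   RELOAD_FOR_OTHER_ADDRESS reloads come first, then the input-address and
   input reloads for each operand in operand order, then the operand-address
   and RELOAD_FOR_INSN reloads, then the insn itself, and finally the output
   reloads and their address reloads.  The free/conflict tests below rely on
   this ordering.  */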
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are
   actually used.  */

static void
mark_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
        {
        case RELOAD_OTHER:
          SET_HARD_REG_BIT (reload_reg_used, i);
          break;

        case RELOAD_FOR_INPUT_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
          break;

        case RELOAD_FOR_OPERAND_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
          break;

        case RELOAD_FOR_OPADDR_ADDR:
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
          break;

        case RELOAD_FOR_OTHER_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
          break;

        case RELOAD_FOR_INPUT:
          SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT:
          SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
          break;

        case RELOAD_FOR_INSN:
          SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
          break;
        }

      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
    }
}
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
        {
        case RELOAD_OTHER:
          CLEAR_HARD_REG_BIT (reload_reg_used, i);
          break;

        case RELOAD_FOR_INPUT_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
          break;

        case RELOAD_FOR_OPERAND_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
          break;

        case RELOAD_FOR_OPADDR_ADDR:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
          break;

        case RELOAD_FOR_OTHER_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
          break;

        case RELOAD_FOR_INPUT:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
          break;

        case RELOAD_FOR_INSN:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
          break;
        }
    }
}
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
         we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
        return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
         operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
         operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
         outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  abort ();
}
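/* For example, a register already claimed for a RELOAD_FOR_OUTPUT reload is
   still free for a RELOAD_FOR_INPUT reload of the same insn: the input copy
   is emitted before the insn and the output copy after it, so the two uses
   never overlap.  */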
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
         check the reg is not in use for any prior part.  It is tempting
         to try to do this by falling through from objects that occur
         later in the insn to ones that occur earlier, but that will not
         correctly take into account the fact that here we MUST ignore
         things that would prevent the register from being allocated in
         the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
         any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
         RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
         RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
         anything that can't be used for it, except that we've already
         tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
         test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
         earlier inputs, other inputs (which we know we don't conflict
         with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
         addresses.  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  abort ();
}
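/* For example, a register recorded in reload_reg_used_in_input[] is not free
   before a RELOAD_FOR_OUTPUT reload, since the input reload is an earlier
   part of the insn, whereas a register used only for a later operand's
   output address still is.  */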
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
         its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
         its value reaches if no subsequent part uses the same register.
         Just like the above function, don't try to do this with lots
         of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
         with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
         and the address of only subsequent inputs and we do not need
         to check for RELOAD_OTHER objects since they are known not to
         conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
          return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
         both input and input address and we do not check for
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
         would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
         we need only check for output addresses.  */

      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
         only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
          return 0;
    }

  return 1;
}
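/* For example, the value in a RELOAD_FOR_INPUT reload register for operand 0
   does not reach the end of the insn if the same register is also used for
   any output reload or output-address reload, since those are processed
   after the insn and overwrite it.  */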
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.  */

static int
reloads_conflict (r1, r2)
     int r1, r2;
{
  enum reload_type r1_type = reload_when_needed[r1];
  enum reload_type r2_type = reload_when_needed[r2];
  int r1_opnum = reload_opnum[r1];
  int r2_opnum = reload_opnum[r2];

  /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS.  */

  if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
              || r2_type == RELOAD_FOR_OPADDR_ADDR
              || r2_type == RELOAD_FOR_INPUT
              || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
              || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
              || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
                  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
              || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return r2_type != RELOAD_FOR_OTHER_ADDRESS;

    default:
      abort ();
    }
}
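/* For example, a RELOAD_FOR_INPUT reload for operand 1 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2, whose address is computed
   after operand 1 has been loaded, but not with one for operand 0.  */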
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];

/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */
static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
         We advance it round-robin between insns to use all spill regs
         equally, so that inherited reloads have a chance
         of leapfrogging each other.  Don't do this, however, when we have
         group needs and failure would be fatal; if we only have a relatively
         small number of spill registers, and more than one of them has
         group needs, then by starting in the middle, we may end up
         allocating the first one in such a way that we are not left with
         sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
        i = last_spill_reg;
      else
        i = -1;

      for (count = 0; count < n_spills; count++)
        {
          int class = (int) reload_reg_class[r];

          i = (i + 1) % n_spills;

          if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
                                 reload_when_needed[r])
              && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
              && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
              /* Look first for regs to share, then for unshared.  But
                 don't share regs used for inherited reloads; they are
                 the ones we want to preserve.  */
              && (pass
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
                                         spill_regs[i])
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
                                              spill_regs[i]))))
            {
              int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
                 (on 68000) got us two FP regs.  If NR is 1,
                 we would reject both of them.  */
              if (force_group)
                nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
              /* If we need only one reg, we have already won.  */
              if (nr == 1)
                {
                  /* But reject a single reg if we demand a group.  */
                  if (force_group)
                    continue;
                  break;
                }
              /* Otherwise check that as many consecutive regs as we need
                 are available here.
                 Also, don't use for a group registers that are
                 needed for nongroups.  */
              if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
                while (nr > 1)
                  {
                    regno = spill_regs[i] + nr - 1;
                    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
                          && spill_reg_order[regno] >= 0
                          && reload_reg_free_p (regno, reload_opnum[r],
                                                reload_when_needed[r])
                          && ! TEST_HARD_REG_BIT (counted_for_nongroups,
                                                  regno)))
                      break;
                    nr--;
                  }
              if (nr == 1)
                break;
            }
        }

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
        break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
        return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
        test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
         in whatever mode the reload reg has: to wit, reload_mode[r].
         We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
         to reload from or into have modes which are valid for this
         reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
        if (! (reload_out[r] != 0
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
          {
            /* The reg is OK.  */
            last_spill_reg = i;

            /* Mark as in use for this insn the reload regs we use
               for it.  */
            mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
                                    reload_when_needed[r], reload_mode[r]);

            reload_reg_rtx[r] = new;
            reload_spill_index[r] = i;
            return 1;
          }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
                 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
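/* allocate_reload_reg is used by choose_reload_regs below, which passes
   LAST_RELOAD as `j == n_reloads - 1' and passes a nonzero NOERROR while an
   inheritance attempt can still be retried without inheritance.  */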
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */
static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     rtx avoid_return_reg;
{
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;

  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
    }
#ifdef SMALL_REGISTER_CLASSES
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (avoid_return_reg)
    {
      int do_avoid = 0;
      int regno = REGNO (avoid_return_reg);
      int nregs
        = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
        if (spill_reg_order[r] >= 0)
          for (j = 0; j < n_reloads; j++)
            if (!reload_optional[j] && reload_reg_rtx[j] == 0
                && (reload_in[j] != 0 || reload_out[j] != 0
                    || reload_secondary_p[j])
                && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
              do_avoid = 1;

      if (! do_avoid)
        avoid_return_reg = 0;
    }
#endif /* SMALL_REGISTER_CLASSES */
#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
#ifdef SMALL_REGISTER_CLASSES
    int tem = (avoid_return_reg != 0);
#else
    int tem = 0;
#endif
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
          && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
          && (reload_reg_rtx[j] == 0
              || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
                  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
        tem++;
  }
#endif
#ifdef SMALL_REGISTER_CLASSES
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
        = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
        if (spill_reg_order[r] >= 0)
          SET_HARD_REG_BIT (reload_reg_used, r);
    }
#endif /* SMALL_REGISTER_CLASSES */
  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      reload_mode[j]
        = (reload_inmode[j] == VOIDmode
           || (GET_MODE_SIZE (reload_outmode[j])
               > GET_MODE_SIZE (reload_inmode[j])))
          ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
        {
          max_group_size = MAX (reload_nregs[j], max_group_size);
          group_class = reg_class_superunion[(int) reload_reg_class[j]][(int) group_class];
        }

      /* If we have already decided to use a certain register,
         don't use it in another way.  */
      if (reload_reg_rtx[j])
        mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
                                reload_when_needed[j], reload_mode[j]);
    }
  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
         sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
         (char *) save_reload_inheritance_insn,
         sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
         sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
         sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
                     reload_reg_used_in_op_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
                     reload_reg_used_in_op_addr_reload);
  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
                     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
                     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
                         reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
                         reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
                         reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
                         reload_reg_used_in_output_addr[i]);
    }
  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
         Beyond this point, subregs can be found in reload_reg_rtx.

         This used to look for an existing reloaded home for all
         of the reloads, and only then perform any new reloads.
         But that could lose if the reloads were done out of reg-class order
         because a later reload with a looser constraint might have an old
         home in a register needed by an earlier reload with a tighter constraint.

         To solve this, we make two passes over the reloads, in the order
         described above.  In the first pass we try to inherit a reload
         from a previous insn.  If there is a later reload that needs a
         class that is a proper subset of the class being processed, we must
         also allocate a spill register during the first pass.

         Then make a second pass over the reloads to allocate any reloads
         that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
        {
          register int r = reload_order[j];
          /* Ignore reloads that got marked inoperative.  */
          if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
            continue;

          /* If find_reloads chose to use reload_in or reload_out as a reload
             register, we don't need to choose one.  Otherwise, try even if
             it found one since we might save an insn if we find the value
             lying around.  */
          if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
              && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
                  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
            continue;
#if 0 /* No longer needed for correct operation.
         It might give better code, or might not; worth an experiment?  */
          /* If this is an optional reload, we can't inherit from earlier insns
             until we are sure that any non-optional reloads have been allocated.
             The following code takes advantage of the fact that optional reloads
             are at the end of reload_order.  */
          if (reload_optional[r] != 0)
            for (i = 0; i < j; i++)
              if ((reload_out[reload_order[i]] != 0
                   || reload_in[reload_order[i]] != 0
                   || reload_secondary_p[reload_order[i]])
                  && ! reload_optional[reload_order[i]]
                  && reload_reg_rtx[reload_order[i]] == 0)
                allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif
          /* First see if this pseudo is already available as reloaded
             for a previous insn.  We cannot try to inherit for reloads
             that are smaller than the maximum number of registers needed
             for groups unless the register we would allocate cannot be used
             for the groups.

             We could check here to see if this is a secondary reload for
             an object that is already in a register of the desired class.
             This would avoid the need for the secondary reload register.
             But this is complex because we can't easily determine what
             objects might want to be loaded via this reload.  So let a
             register be allocated here.  In `emit_reload_insns' we suppress
             one of the loads in the case described above.  */

          if (inheritance)
            {
              register int regno = -1;
              enum machine_mode mode;

              if (reload_in[r] == 0)
                ;
              else if (GET_CODE (reload_in[r]) == REG)
                {
                  regno = REGNO (reload_in[r]);
                  mode = GET_MODE (reload_in[r]);
                }
              else if (GET_CODE (reload_in_reg[r]) == REG)
                {
                  regno = REGNO (reload_in_reg[r]);
                  mode = GET_MODE (reload_in_reg[r]);
                }
#if 0
              /* This won't work, since REGNO can be a pseudo reg number.
                 Also, it takes much more hair to keep track of all the things
                 that can invalidate an inherited reload of part of a pseudoreg.  */
              else if (GET_CODE (reload_in[r]) == SUBREG
                       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
                regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

              if (regno >= 0 && reg_last_reload_reg[regno] != 0)
                {
                  i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];

                  if (reg_reloaded_contents[i] == regno
                      && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
                          >= GET_MODE_SIZE (mode))
                      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
                      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
                                            spill_regs[i])
                      && (reload_nregs[r] == max_group_size
                          || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
                                                  spill_regs[i]))
                      && reload_reg_free_p (spill_regs[i], reload_opnum[r],
                                            reload_when_needed[r])
                      && reload_reg_free_before_p (spill_regs[i],
                                                   reload_opnum[r],
                                                   reload_when_needed[r]))
                    {
                      /* If a group is needed, verify that all the subsequent
                         registers still have their values intact.  */
                      int nr
                        = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
                      int k;

                      for (k = 1; k < nr; k++)
                        if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
                            != regno)
                          break;

                      if (k == nr)
                        {
                          int i1;

                          /* We found a register that contains the
                             value we need.  If this register is the
                             same as an `earlyclobber' operand of the
                             current insn, just mark it as a place to
                             reload from since we can't use it as the
                             reload register itself.  */

                          for (i1 = 0; i1 < n_earlyclobbers; i1++)
                            if (reg_overlap_mentioned_for_reload_p
                                (reg_last_reload_reg[regno],
                                 reload_earlyclobbers[i1]))
                              break;

                          if (i1 != n_earlyclobbers
                              /* Don't really use the inherited spill reg
                                 if we need it wider than we've got it.  */
                              || (GET_MODE_SIZE (reload_mode[r])
                                  > GET_MODE_SIZE (mode)))
                            reload_override_in[r] = reg_last_reload_reg[regno];
                          else
                            {
                              /* We can use this as a reload reg.  */
                              /* Mark the register as in use for this part of
                                 the insn.  */
                              mark_reload_reg_in_use (spill_regs[i],
                                                      reload_opnum[r],
                                                      reload_when_needed[r],
                                                      reload_mode[r]);
                              reload_reg_rtx[r] = reg_last_reload_reg[regno];
                              reload_inherited[r] = 1;
                              reload_inheritance_insn[r]
                                = reg_reloaded_insn[i];
                              reload_spill_index[r] = i;
                              for (k = 0; k < nr; k++)
                                SET_HARD_REG_BIT (reload_reg_used_for_inherit,
                                                  spill_regs[i] + k);
                            }
                        }
                    }
                }
            }
          /* Here's another way to see if the value is already lying around.  */
          if (inheritance
              && reload_in[r] != 0
              && ! reload_inherited[r]
              && reload_out[r] == 0
              && (CONSTANT_P (reload_in[r])
                  || GET_CODE (reload_in[r]) == PLUS
                  || GET_CODE (reload_in[r]) == REG
                  || GET_CODE (reload_in[r]) == MEM)
              && (reload_nregs[r] == max_group_size
                  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
            {
              register rtx equiv
                = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
                                  -1, NULL_PTR, 0, reload_mode[r]);
              int regno;

              if (equiv != 0)
                {
                  if (GET_CODE (equiv) == REG)
                    regno = REGNO (equiv);
                  else if (GET_CODE (equiv) == SUBREG)
                    {
                      /* This must be a SUBREG of a hard register.
                         Make a new REG since this might be used in an
                         address and not all machines support SUBREGs
                         there.  */
                      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
                      equiv = gen_rtx (REG, reload_mode[r], regno);
                    }
                  else
                    abort ();
                }

              /* If we found a spill reg, reject it unless it is free
                 and of the desired class.  */
              if (equiv != 0
                  && ((spill_reg_order[regno] >= 0
                       && ! reload_reg_free_before_p (regno, reload_opnum[r],
                                                      reload_when_needed[r]))
                      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
                                              regno)))
                equiv = 0;

              if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
                equiv = 0;

              if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
                equiv = 0;

              /* We found a register that contains the value we need.
                 If this register is the same as an `earlyclobber' operand
                 of the current insn, just mark it as a place to reload from
                 since we can't use it as the reload register itself.  */

              if (equiv != 0)
                for (i = 0; i < n_earlyclobbers; i++)
                  if (reg_overlap_mentioned_for_reload_p (equiv,
                                                          reload_earlyclobbers[i]))
                    {
                      reload_override_in[r] = equiv;
                      equiv = 0;
                      break;
                    }

              /* JRV: If the equiv register we have found is explicitly
                 clobbered in the current insn, mark but don't use, as above.  */

              if (equiv != 0 && regno_clobbered_p (regno, insn))
                {
                  reload_override_in[r] = equiv;
                  equiv = 0;
                }

              /* If we found an equivalent reg, say no code need be generated
                 to load it, and use it as our reload reg.  */
              if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
                {
                  reload_reg_rtx[r] = equiv;
                  reload_inherited[r] = 1;
                  /* If it is a spill reg,
                     mark the spill reg as in use for this insn.  */
                  i = spill_reg_order[regno];
                  if (i >= 0)
                    {
                      int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
                      int k;

                      mark_reload_reg_in_use (regno, reload_opnum[r],
                                              reload_when_needed[r],
                                              reload_mode[r]);
                      for (k = 0; k < nr; k++)
                        SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
                    }
                }
            }
      /* If we found a register to use already, or if this is an optional
         reload, we are done.  */
      if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
        continue;

#if 0 /* No longer needed for correct operation.  Might or might not
         give better code on the average.  Want to experiment?  */

      /* See if there is a later reload that has a class different from our
         class that intersects our class or that requires less register
         than our reload.  If so, we must allocate a register to this
         reload now, since that reload might inherit a previous reload
         and take the only available register in our class.  Don't do this
         for optional reloads since they will force all previous reloads
         to be allocated.  Also don't do this for reloads that have been
         turned off.  */

      for (i = j + 1; i < n_reloads; i++)
        {
          int s = reload_order[i];

          if ((reload_in[s] == 0 && reload_out[s] == 0
               && ! reload_secondary_p[s])
              || reload_optional[s])
            continue;

          if ((reload_reg_class[s] != reload_reg_class[r]
               && reg_classes_intersect_p (reload_reg_class[r],
                                           reload_reg_class[s]))
              || reload_nregs[s] < reload_nregs[r])
            break;
        }

      if (i == n_reloads)
        continue;

      allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
#endif
    }
  /* Now allocate reload registers for anything non-optional that
     didn't get one yet.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      /* Ignore reloads that got marked inoperative.  */
      if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
        continue;

      /* Skip reloads that already have a register allocated or are
         optional.  */
      if (reload_reg_rtx[r] != 0 || reload_optional[r])
        continue;

      if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
        break;
    }

  /* If that loop got all the way, we have won.  */
  if (j == n_reloads)
    break;
  /* Loop around and try without any inheritance.  */
  /* First undo everything done by the failed attempt
     to allocate with inheritance.  */
  bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
         sizeof reload_reg_rtx);
  bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
         sizeof reload_inherited);
  bcopy ((char *) save_reload_inheritance_insn,
         (char *) reload_inheritance_insn,
         sizeof reload_inheritance_insn);
  bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
         sizeof reload_override_in);
  bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
         sizeof reload_spill_index);
  COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
  COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
  COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
                     save_reload_reg_used_in_op_addr);
  COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
                     save_reload_reg_used_in_op_addr_reload);
  COPY_HARD_REG_SET (reload_reg_used_in_insn,
                     save_reload_reg_used_in_insn);
  COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
                     save_reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (reload_reg_used_in_input[i],
                         save_reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (reload_reg_used_in_output[i],
                         save_reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
                         save_reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
                         save_reload_reg_used_in_output_addr[i]);
    }
  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
          && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
                                         reload_opnum[r],
                                         reload_when_needed[r]))
        reload_inherited[r] = 0;

      /* If we found a better place to reload from,
         validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
          && (GET_CODE (reload_override_in[r]) == REG
              || GET_CODE (reload_override_in[r]) == SUBREG))
        {
          int regno = true_regnum (reload_override_in[r]);
          if (spill_reg_order[regno] >= 0
              && ! reload_reg_free_before_p (regno, reload_opnum[r],
                                             reload_when_needed[r]))
            reload_override_in[r] = 0;
        }
    }
  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
        && ((reload_optional[j] && ! reload_inherited[j])
            || (reload_in[j] == 0 && reload_out[j] == 0
                && ! reload_secondary_p[j])))
      {
        int regno = true_regnum (reload_reg_rtx[j]);

        if (spill_reg_order[regno] >= 0)
          clear_reload_reg_in_use (regno, reload_opnum[j],
                                   reload_when_needed[j], reload_mode[j]);
        reload_reg_rtx[j] = 0;
      }
  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
         If reload_reg_rtx[r] is 0, this is an optional reload
         that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
          && reload_reg_rtx[r] != 0)
        {
          register int nregno = REGNO (reload_out[r]);
          int nr = 1;

          if (nregno < FIRST_PSEUDO_REGISTER)
            nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

          while (--nr >= 0)
            reg_has_output_reload[nregno + nr] = 1;

          if (i >= 0)
            {
              nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
              while (--nr >= 0)
                SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
            }

          if (reload_when_needed[r] != RELOAD_OTHER
              && reload_when_needed[r] != RELOAD_FOR_OUTPUT
              && reload_when_needed[r] != RELOAD_FOR_INSN)
            abort ();
        }
    }
}
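
/* A minimal, self-contained sketch (not GNU CC code) of the inheritance
   bookkeeping idea used in choose_reload_regs above: remember which
   pseudo's value each spill register last received, so a later insn that
   needs the same pseudo can reuse the hard register instead of emitting
   another load.  The fixed table size and the plain int "pseudo" numbers
   are stand-ins invented for this example.  */

#define SKETCH_N_SPILL_REGS 4

static int sketch_contents[SKETCH_N_SPILL_REGS];  /* pseudo last loaded, or -1 */

static void
sketch_forget_all ()
{
  int i;
  for (i = 0; i < SKETCH_N_SPILL_REGS; i++)
    sketch_contents[i] = -1;
}

static void
sketch_record_load (spill_index, pseudo)
     int spill_index, pseudo;
{
  sketch_contents[spill_index] = pseudo;
}

/* Return the spill reg already holding PSEUDO, or -1 to force a real reload.  */

static int
sketch_find_inheritable (pseudo)
     int pseudo;
{
  int i;
  for (i = 0; i < SKETCH_N_SPILL_REGS; i++)
    if (sketch_contents[i] == pseudo)
      return i;
  return -1;
}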
/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
   reloads of the same item for fear that we might not have enough reload
   registers.  However, normally they will get the same reload register
   and hence actually need not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */

#ifdef SMALL_REGISTER_CLASSES

static void
merge_assigned_reloads (insn)
     rtx insn;
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
          || reload_out[i] != 0 || reload_reg_rtx[i] == 0
          || reg_set_p (reload_reg_rtx[i], insn))
        continue;

      /* Look at all other reloads.  Ensure that the only use of this
         reload_reg_rtx is in a reload that just loads the same value
         as we do.  Note that any secondary reloads must be of the identical
         class since the values, modes, and result registers are the
         same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
        {
          if (i == j || reload_reg_rtx[j] == 0
              || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
                                            reload_reg_rtx[i]))
            continue;

          /* If the reload regs aren't exactly the same (e.g., different modes)
             or if the values are different, we can't merge anything with this
             reload register.  */

          if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
              || reload_out[j] != 0 || reload_in[j] == 0
              || ! rtx_equal_p (reload_in[i], reload_in[j]))
            break;
        }

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
         we, in fact, found any matching reloads.  */

      if (j == n_reloads)
        {
          for (j = 0; j < n_reloads; j++)
            if (i != j && reload_reg_rtx[j] != 0
                && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
              {
                reload_when_needed[i] = RELOAD_OTHER;
                reload_in[j] = 0;
                transfer_replacements (i, j);
              }

          /* If this is now RELOAD_OTHER, look for any reloads that load
             parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
             if they were for inputs, RELOAD_OTHER for outputs.  Note that
             this test is equivalent to looking for reloads for this operand
             number.  */

          if (reload_when_needed[i] == RELOAD_OTHER)
            for (j = 0; j < n_reloads; j++)
              if (reload_in[j] != 0
                  && reload_when_needed[i] != RELOAD_OTHER
                  && reg_overlap_mentioned_for_reload_p (reload_in[j],
                                                         reload_in[i]))
                reload_when_needed[j]
                  = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
                    ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
        }
    }
}
#endif /* SMALL_REGISTER_CLASSES */
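
/* Illustrative sketch (not GNU CC code) of the merging idea above: given
   a table of (input value, assigned register) pairs, entries that name
   the same register and the same input collapse into one.  The int-valued
   stand-ins for rtx values are assumptions made only for this example.  */

struct sketch_reload
{
  int in;       /* stand-in for reload_in[]; 0 means no input */
  int reg;      /* stand-in for reload_reg_rtx[]; 0 means unassigned */
  int merged;   /* nonzero once folded into an earlier entry */
};

static int
sketch_merge_reloads (r, n)
     struct sketch_reload *r;
     int n;
{
  int i, j, nmerged = 0;

  for (i = 0; i < n; i++)
    for (j = i + 1; j < n; j++)
      if (! r[j].merged && r[i].in != 0 && r[i].reg != 0
          && r[j].reg == r[i].reg && r[j].in == r[i].in)
        r[j].merged = 1, nmerged++;

  return nmerged;
}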
/* Output insns to reload values in and out of the chosen reload regs.  */

static void
emit_reload_insns (insn)
     rtx insn;
{
  register int j;
  rtx input_reload_insns[MAX_RECOG_OPERANDS];
  rtx other_input_address_reload_insns = 0;
  rtx other_input_reload_insns = 0;
  rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx operand_reload_insns = 0;
  rtx other_operand_reload_insns = 0;
  rtx following_insn = NEXT_INSN (insn);
  rtx before_insn = insn;
  /* Values to be put in spill_reg_store are put here first.  */
  rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];

  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j] = 0;

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      register rtx old;
      rtx oldequiv_reg = 0;
      rtx this_reload_insn = 0;

      if (reload_spill_index[j] >= 0)
        new_spill_reg_store[reload_spill_index[j]] = 0;

      old = reload_in[j];
      if (old != 0 && ! reload_inherited[j]
          && ! rtx_equal_p (reload_reg_rtx[j], old)
          && reload_reg_rtx[j] != 0)
        {
          register rtx reloadreg = reload_reg_rtx[j];
          rtx oldequiv = 0;
          enum machine_mode mode;
          rtx *where;
          int special = 0;

          /* Determine the mode to reload in.
             This is very tricky because we have three to choose from.
             There is the mode the insn operand wants (reload_inmode[J]).
             There is the mode of the reload register RELOADREG.
             There is the intrinsic mode of the operand, which we could find
             by stripping some SUBREGs.
             It turns out that RELOADREG's mode is irrelevant:
             we can change that arbitrarily.

             Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
             then the reload reg may not support QImode moves, so use SImode.
             If foo is in memory due to spilling a pseudo reg, this is safe,
             because the QImode value is in the least significant part of a
             slot big enough for a SImode.  If foo is some other sort of
             memory reference, then it is impossible to reload this case,
             so previous passes had better make sure this never happens.

             Then consider a one-word union which has SImode and one of its
             members is a float, being fetched as (SUBREG:SF union:SI).
             We must fetch that as SFmode because we could be loading into
             a float-only register.  In this case OLD's mode is correct.

             Consider an immediate integer: it has VOIDmode.  Here we need
             to get a mode from something else.

             In some cases, there is a fourth mode, the operand's
             containing mode.  If the insn specifies a containing mode for
             this operand, it overrides all others.

             I am not sure whether the algorithm here is always right,
             but it does the right things in those cases.  */

          mode = GET_MODE (old);
          if (mode == VOIDmode)
            mode = reload_inmode[j];

#ifdef SECONDARY_INPUT_RELOAD_CLASS
          /* If we need a secondary register for this operation, see if
             the value is already in a register in that class.  Don't
             do this if the secondary register will be used as a scratch
             register.  */

          if (reload_secondary_in_reload[j] >= 0
              && reload_secondary_in_icode[j] == CODE_FOR_nothing)
            oldequiv
              = find_equiv_reg (old, insn,
                                reload_reg_class[reload_secondary_in_reload[j]],
                                -1, NULL_PTR, 0, mode);
#endif

          /* If reloading from memory, see if there is a register
             that already holds the same value.  If so, reload from there.
             We can pass 0 as the reload_reg_p argument because
             any other reload has either already been emitted,
             in which case find_equiv_reg will see the reload-insn,
             or has yet to be emitted, in which case it doesn't matter
             because we will use this equiv reg right away.  */

          if (oldequiv == 0 && optimize
              && (GET_CODE (old) == MEM
                  || (GET_CODE (old) == REG
                      && REGNO (old) >= FIRST_PSEUDO_REGISTER
                      && reg_renumber[REGNO (old)] < 0)))
            oldequiv = find_equiv_reg (old, insn, ALL_REGS,
                                       -1, NULL_PTR, 0, mode);

          if (oldequiv)
            {
              int regno = true_regnum (oldequiv);

              /* If OLDEQUIV is a spill register, don't use it for this
                 if any other reload needs it at an earlier stage of this insn
                 or at this stage.  */
              if (spill_reg_order[regno] >= 0
                  && (! reload_reg_free_p (regno, reload_opnum[j],
                                           reload_when_needed[j])
                      || ! reload_reg_free_before_p (regno, reload_opnum[j],
                                                     reload_when_needed[j])))
                oldequiv = 0;

              /* If OLDEQUIV is not a spill register,
                 don't use it if any other reload wants it.  */
              if (oldequiv != 0 && spill_reg_order[regno] < 0)
                {
                  int k;
                  for (k = 0; k < n_reloads; k++)
                    if (reload_reg_rtx[k] != 0 && k != j
                        && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
                                                               oldequiv))
                      {
                        oldequiv = 0;
                        break;
                      }
                }

              /* If it is no cheaper to copy from OLDEQUIV into the
                 reload register than it would be to move from memory,
                 don't use it.  Likewise, if we need a secondary register
                 or memory.  */

              if (oldequiv != 0
                  && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
                       && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
                                               reload_reg_class[j])
                           >= MEMORY_MOVE_COST (mode)))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
                      || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
                                                        mode, oldequiv)
                          != NO_REGS)
#endif
#ifdef SECONDARY_MEMORY_NEEDED
                      || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
                                                  REGNO_REG_CLASS (regno),
                                                  mode)
#endif
                      ))
                oldequiv = 0;
            }

          if (oldequiv == 0)
            oldequiv = old;
          else if (GET_CODE (oldequiv) == REG)
            oldequiv_reg = oldequiv;
          else if (GET_CODE (oldequiv) == SUBREG)
            oldequiv_reg = SUBREG_REG (oldequiv);

          /* If we are reloading from a register that was recently stored in
             with an output-reload, see if we can prove there was
             actually no need to store the old value in it.  */

          if (optimize && GET_CODE (oldequiv) == REG
              && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
              && spill_reg_order[REGNO (oldequiv)] >= 0
              && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
              && find_reg_note (insn, REG_DEAD, reload_in[j])
              /* This is unsafe if operand occurs more than once in current
                 insn.  Perhaps some occurrences weren't reloaded.  */
              && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
            delete_output_reload
              (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);

          /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
             then load RELOADREG from OLDEQUIV.  Note that we cannot use
             gen_lowpart_common since it can do the wrong thing when
             RELOADREG has a multi-word mode.  Note that RELOADREG
             must always be a REG here.  */

          if (GET_MODE (reloadreg) != mode)
            reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
          while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
            oldequiv = SUBREG_REG (oldequiv);
          if (GET_MODE (oldequiv) != VOIDmode
              && mode != GET_MODE (oldequiv))
            oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
          /* Switch to the right place to emit the reload insns.  */
          switch (reload_when_needed[j])
            {
            case RELOAD_OTHER:
              where = &other_input_reload_insns;
              break;
            case RELOAD_FOR_INPUT:
              where = &input_reload_insns[reload_opnum[j]];
              break;
            case RELOAD_FOR_INPUT_ADDRESS:
              where = &input_address_reload_insns[reload_opnum[j]];
              break;
            case RELOAD_FOR_OUTPUT_ADDRESS:
              where = &output_address_reload_insns[reload_opnum[j]];
              break;
            case RELOAD_FOR_OPERAND_ADDRESS:
              where = &operand_reload_insns;
              break;
            case RELOAD_FOR_OPADDR_ADDR:
              where = &other_operand_reload_insns;
              break;
            case RELOAD_FOR_OTHER_ADDRESS:
              where = &other_input_address_reload_insns;
              break;
            default:
              abort ();
            }

          push_to_sequence (*where);

          /* Auto-increment addresses must be reloaded in a special way.  */
          if (GET_CODE (oldequiv) == POST_INC
              || GET_CODE (oldequiv) == POST_DEC
              || GET_CODE (oldequiv) == PRE_INC
              || GET_CODE (oldequiv) == PRE_DEC)
            {
              /* We are not going to bother supporting the case where an
                 incremented register can't be copied directly from
                 OLDEQUIV since this seems highly unlikely.  */
              if (reload_secondary_in_reload[j] >= 0)
                abort ();
              /* Prevent normal processing of this reload.  */
              special = 1;
              /* Output a special code sequence for this case.  */
              inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
            }

          /* If we are reloading a pseudo-register that was set by the previous
             insn, see if we can get rid of that pseudo-register entirely
             by redirecting the previous insn into our reload register.  */

          else if (optimize && GET_CODE (old) == REG
                   && REGNO (old) >= FIRST_PSEUDO_REGISTER
                   && dead_or_set_p (insn, old)
                   /* This is unsafe if some other reload
                      uses the same reg first.  */
                   && reload_reg_free_before_p (REGNO (reloadreg),
                                                reload_opnum[j],
                                                reload_when_needed[j]))
            {
              rtx temp = PREV_INSN (insn);
              while (temp && GET_CODE (temp) == NOTE)
                temp = PREV_INSN (temp);
              if (temp
                  && GET_CODE (temp) == INSN
                  && GET_CODE (PATTERN (temp)) == SET
                  && SET_DEST (PATTERN (temp)) == old
                  /* Make sure we can access insn_operand_constraint.  */
                  && asm_noperands (PATTERN (temp)) < 0
                  /* This is unsafe if prev insn rejects our reload reg.  */
                  && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
                                               reloadreg)
                  /* This is unsafe if operand occurs more than once in current
                     insn.  Perhaps some occurrences aren't reloaded.  */
                  && count_occurrences (PATTERN (insn), old) == 1
                  /* Don't risk splitting a matching pair of operands.  */
                  && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
                {
                  /* Store into the reload register instead of the pseudo.  */
                  SET_DEST (PATTERN (temp)) = reloadreg;
                  /* If these are the only uses of the pseudo reg,
                     pretend for GDB it lives in the reload reg we used.  */
                  if (reg_n_deaths[REGNO (old)] == 1
                      && reg_n_sets[REGNO (old)] == 1)
                    {
                      reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
                      alter_reg (REGNO (old), -1);
                    }
                  special = 1;
                }
            }

          /* We can't do that, so output an insn to load RELOADREG.  */
          if (! special)
            {
#ifdef SECONDARY_INPUT_RELOAD_CLASS
              rtx second_reload_reg = 0;
              enum insn_code icode;

              /* If we have a secondary reload, pick up the secondary register
                 and icode, if any.  If OLDEQUIV and OLD are different or
                 if this is an in-out reload, recompute whether or not we
                 still need a secondary register and what the icode should
                 be.  If we still need a secondary register and the class or
                 icode is different, go back to reloading from OLD if using
                 OLDEQUIV means that we got the wrong type of register.  We
                 cannot have different class or icode due to an in-out reload
                 because we don't make such reloads when both the input and
                 output need secondary reload registers.  */

              if (reload_secondary_in_reload[j] >= 0)
                {
                  int secondary_reload = reload_secondary_in_reload[j];
                  rtx real_oldequiv = oldequiv;
                  rtx real_old = old;

                  /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
                     and similarly for OLD.
                     See comments in get_secondary_reload in reload.c.  */
                  if (GET_CODE (oldequiv) == REG
                      && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
                      && reg_equiv_mem[REGNO (oldequiv)] != 0)
                    real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];

                  if (GET_CODE (old) == REG
                      && REGNO (old) >= FIRST_PSEUDO_REGISTER
                      && reg_equiv_mem[REGNO (old)] != 0)
                    real_old = reg_equiv_mem[REGNO (old)];

                  second_reload_reg = reload_reg_rtx[secondary_reload];
                  icode = reload_secondary_in_icode[j];

                  if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
                      || (reload_in[j] != 0 && reload_out[j] != 0))
                    {
                      enum reg_class new_class
                        = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
                                                        mode, real_oldequiv);

                      if (new_class == NO_REGS)
                        second_reload_reg = 0;
                      else
                        {
                          enum insn_code new_icode;
                          enum machine_mode new_mode;

                          if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
                                                   REGNO (second_reload_reg)))
                            oldequiv = old, real_oldequiv = real_old;
                          else
                            {
                              new_icode = reload_in_optab[(int) mode];
                              if (new_icode != CODE_FOR_nothing
                                  && ((insn_operand_predicate[(int) new_icode][0]
                                       && ! ((*insn_operand_predicate[(int) new_icode][0])
                                             (reloadreg, mode)))
                                      || (insn_operand_predicate[(int) new_icode][1]
                                          && ! ((*insn_operand_predicate[(int) new_icode][1])
                                                (real_oldequiv, mode)))))
                                new_icode = CODE_FOR_nothing;

                              if (new_icode == CODE_FOR_nothing)
                                new_mode = mode;
                              else
                                new_mode = insn_operand_mode[(int) new_icode][2];

                              if (GET_MODE (second_reload_reg) != new_mode)
                                {
                                  if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
                                                           new_mode))
                                    oldequiv = old, real_oldequiv = real_old;
                                  else
                                    second_reload_reg
                                      = gen_rtx (REG, new_mode,
                                                 REGNO (second_reload_reg));
                                }
                            }
                        }
                    }

                  /* If we still need a secondary reload register, check
                     to see if it is being used as a scratch or intermediate
                     register and generate code appropriately.  If we need
                     a scratch register, use REAL_OLDEQUIV since the form of
                     the insn may depend on the actual address if it is
                     a MEM.  */

                  if (second_reload_reg)
                    {
                      if (icode != CODE_FOR_nothing)
                        {
                          emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
                                                      second_reload_reg));
                          special = 1;
                        }
                      else
                        {
                          /* See if we need a scratch register to load the
                             intermediate register (a tertiary reload).  */
                          enum insn_code tertiary_icode
                            = reload_secondary_in_icode[secondary_reload];

                          if (tertiary_icode != CODE_FOR_nothing)
                            {
                              rtx third_reload_reg
                                = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];

                              emit_insn ((GEN_FCN (tertiary_icode)
                                          (second_reload_reg, real_oldequiv,
                                           third_reload_reg)));
                            }
                          else
                            gen_reload (second_reload_reg, oldequiv,
                                        reload_opnum[j],
                                        reload_when_needed[j]);

                          oldequiv = second_reload_reg;
                        }
                    }
                }
#endif

              if (! special && ! rtx_equal_p (reloadreg, oldequiv))
                gen_reload (reloadreg, oldequiv, reload_opnum[j],
                            reload_when_needed[j]);

#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
              /* We may have to make a REG_DEAD note for the secondary reload
                 register in the insns we just made.  Find the last insn that
                 mentioned the register.  */
              if (! special && second_reload_reg
                  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
                {
                  rtx prev;

                  for (prev = get_last_insn (); prev;
                       prev = PREV_INSN (prev))
                    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
                        && reg_overlap_mentioned_for_reload_p (second_reload_reg,
                                                               PATTERN (prev)))
                      {
                        REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
                                                    second_reload_reg,
                                                    REG_NOTES (prev));
                        break;
                      }
                }
#endif
            }

          /* End this sequence.  */
          *where = get_insns ();
          end_sequence ();
        }

      /* Add a note saying the input reload reg
         dies in this insn, if anyone cares.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
      if (old != 0
          && reload_reg_rtx[j] != old
          && reload_reg_rtx[j] != 0
          && reload_out[j] == 0
          && ! reload_inherited[j]
          && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
        {
          register rtx reloadreg = reload_reg_rtx[j];

#if 0
          /* We can't abort here because we need to support this for sched.c.
             It's not terrible to miss a REG_DEAD note, but we should try
             to figure out how to do this correctly.  */
          /* The code below is incorrect for address-only reloads.  */
          if (reload_when_needed[j] != RELOAD_OTHER
              && reload_when_needed[j] != RELOAD_FOR_INPUT)
            abort ();
#endif

          /* Add a death note to this insn, for an input reload.  */

          if ((reload_when_needed[j] == RELOAD_OTHER
               || reload_when_needed[j] == RELOAD_FOR_INPUT)
              && ! dead_or_set_p (insn, reloadreg))
            REG_NOTES (insn)
              = gen_rtx (EXPR_LIST, REG_DEAD,
                         reloadreg, REG_NOTES (insn));
        }

      /* When we inherit a reload, the last marked death of the reload reg
         may no longer really be a death.  */
      if (reload_reg_rtx[j] != 0
          && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
          && reload_inherited[j])
        {
          /* Handle inheriting an output reload.
             Remove the death note from the output reload insn.  */
          if (reload_spill_index[j] >= 0
              && GET_CODE (reload_in[j]) == REG
              && spill_reg_store[reload_spill_index[j]] != 0
              && find_regno_note (spill_reg_store[reload_spill_index[j]],
                                  REG_DEAD, REGNO (reload_reg_rtx[j])))
            remove_death (REGNO (reload_reg_rtx[j]),
                          spill_reg_store[reload_spill_index[j]]);
          /* Likewise for input reloads that were inherited.  */
          else if (reload_spill_index[j] >= 0
                   && GET_CODE (reload_in[j]) == REG
                   && spill_reg_store[reload_spill_index[j]] == 0
                   && reload_inheritance_insn[j] != 0
                   && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
                                       REGNO (reload_reg_rtx[j])))
            remove_death (REGNO (reload_reg_rtx[j]),
                          reload_inheritance_insn[j]);
          else
            {
              rtx prev;

              /* We got this register from find_equiv_reg.
                 Search back for its last death note and get rid of it.
                 But don't search back too far.
                 Don't go past a place where this reg is set,
                 since a death note before that remains valid.  */
              for (prev = PREV_INSN (insn);
                   prev && GET_CODE (prev) != CODE_LABEL;
                   prev = PREV_INSN (prev))
                if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
                    && dead_or_set_p (prev, reload_reg_rtx[j]))
                  {
                    if (find_regno_note (prev, REG_DEAD,
                                         REGNO (reload_reg_rtx[j])))
                      remove_death (REGNO (reload_reg_rtx[j]), prev);
                    break;
                  }
            }
        }

      /* We might have used find_equiv_reg above to choose an alternate
         place from which to reload.  If so, and it died, we need to remove
         that death and move it to one of the insns we just made.  */

      if (oldequiv_reg != 0
          && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
        {
          rtx prev, prev1;

          for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
               prev = PREV_INSN (prev))
            if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
                && dead_or_set_p (prev, oldequiv_reg))
              {
                if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
                  {
                    for (prev1 = this_reload_insn;
                         prev1; prev1 = PREV_INSN (prev1))
                      if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
                          && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
                                                                 PATTERN (prev1)))
                        {
                          REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
                                                       oldequiv_reg,
                                                       REG_NOTES (prev1));
                          break;
                        }
                    remove_death (REGNO (oldequiv_reg), prev);
                  }
                break;
              }
        }
#endif

      /* If we are reloading a register that was recently stored in with an
         output-reload, see if we can prove there was
         actually no need to store the old value in it.  */

      if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
          && reload_in[j] != 0
          && GET_CODE (reload_in[j]) == REG
#if 0
          /* There doesn't seem to be any reason to restrict this to pseudos
             and doing so loses in the case where we are copying from a
             register of the wrong class.  */
          && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
#endif
          && spill_reg_store[reload_spill_index[j]] != 0
          /* This is unsafe if some other reload uses the same reg first.  */
          && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
                                       reload_opnum[j], reload_when_needed[j])
          && dead_or_set_p (insn, reload_in[j])
          /* This is unsafe if operand occurs more than once in current
             insn.  Perhaps some occurrences weren't reloaded.  */
          && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
        delete_output_reload (insn, j,
                              spill_reg_store[reload_spill_index[j]]);

      /* Input-reloading is done.  Now do output-reloading,
         storing the value from the reload-register after the main insn
         if reload_out[j] is nonzero.

         ??? At some point we need to support handling output reloads of
         JUMP_INSNs or insns that set cc0.  */
      old = reload_out[j];
      if (old != 0
          && reload_reg_rtx[j] != old
          && reload_reg_rtx[j] != 0)
        {
          register rtx reloadreg = reload_reg_rtx[j];
          register rtx second_reloadreg = 0;
          rtx note, p;
          enum machine_mode mode;
          int special = 0;

          /* An output operand that dies right away does need a reload,
             but need not be copied from it.  Show the new location in the
             REG_UNUSED note.  */
          if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
              && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
            {
              XEXP (note, 0) = reload_reg_rtx[j];
              continue;
            }
          else if (GET_CODE (old) == SCRATCH)
            /* If we aren't optimizing, there won't be a REG_UNUSED note,
               but we don't want to make an output reload.  */
            continue;

          /* Strip off of OLD any size-increasing SUBREGs such as
             (SUBREG:SI foo:QI 0).  */

          while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
                 && (GET_MODE_SIZE (GET_MODE (old))
                     > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
            old = SUBREG_REG (old);

          /* If this is a JUMP_INSN, we can't support output reloads yet.  */
          if (GET_CODE (insn) == JUMP_INSN)
            abort ();

          push_to_sequence (output_reload_insns[reload_opnum[j]]);

          /* Determine the mode to reload in.
             See comments above (for input reloading).  */

          mode = GET_MODE (old);
          if (mode == VOIDmode)
            {
              /* VOIDmode should never happen for an output.  */
              if (asm_noperands (PATTERN (insn)) < 0)
                /* It's the compiler's fault.  */
                fatal_insn ("VOIDmode on an output", insn);
              error_for_asm (insn, "output operand is constant in `asm'");
              /* Prevent crash--use something we know is valid.  */
              mode = word_mode;
              old = gen_rtx (REG, mode, REGNO (reloadreg));
            }

          if (GET_MODE (reloadreg) != mode)
            reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));

#ifdef SECONDARY_OUTPUT_RELOAD_CLASS

          /* If we need two reload regs, set RELOADREG to the intermediate
             one, since it will be stored into OLD.  We might need a secondary
             register only for an input reload, so check again here.  */

          if (reload_secondary_out_reload[j] >= 0)
            {
              rtx real_old = old;

              if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
                  && reg_equiv_mem[REGNO (old)] != 0)
                real_old = reg_equiv_mem[REGNO (old)];

              if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
                                                  mode, real_old)
                   != NO_REGS))
                {
                  second_reloadreg = reloadreg;
                  reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];

                  /* See if RELOADREG is to be used as a scratch register
                     or as an intermediate register.  */
                  if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
                    {
                      emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
                                  (real_old, second_reloadreg, reloadreg)));
                      special = 1;
                    }
                  else
                    {
                      /* See if we need both a scratch and intermediate reload
                         register.  */
                      int secondary_reload = reload_secondary_out_reload[j];
                      enum insn_code tertiary_icode
                        = reload_secondary_out_icode[secondary_reload];

                      if (GET_MODE (reloadreg) != mode)
                        reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));

                      if (tertiary_icode != CODE_FOR_nothing)
                        {
                          rtx third_reloadreg
                            = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];

                          /* Copy primary reload reg to secondary reload reg.
                             (Note that these have been swapped above.)
                             Then copy secondary reload reg to OLD using our
                             insn.  */

                          gen_reload (reloadreg, second_reloadreg,
                                      reload_opnum[j], reload_when_needed[j]);
                          emit_insn ((GEN_FCN (tertiary_icode)
                                      (real_old, reloadreg, third_reloadreg)));
                          special = 1;
                        }
                      else
                        /* Copy between the reload regs here and then to
                           OUT later.  */

                        gen_reload (reloadreg, second_reloadreg,
                                    reload_opnum[j], reload_when_needed[j]);
                    }
                }
            }
#endif

          /* Output the last reload insn.  */
          if (! special)
            gen_reload (old, reloadreg, reload_opnum[j],
                        reload_when_needed[j]);

#ifdef PRESERVE_DEATH_INFO_REGNO_P
          /* If final will look at death notes for this reg,
             put one on the last output-reload insn to use it.  Similarly
             for any secondary register.  */
          if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
            for (p = get_last_insn (); p; p = PREV_INSN (p))
              if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
                  && reg_overlap_mentioned_for_reload_p (reloadreg,
                                                         PATTERN (p)))
                REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
                                         reloadreg, REG_NOTES (p));

#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
          if (! special && second_reloadreg
              && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
            for (p = get_last_insn (); p; p = PREV_INSN (p))
              if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
                  && reg_overlap_mentioned_for_reload_p (second_reloadreg,
                                                         PATTERN (p)))
                REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
                                         second_reloadreg, REG_NOTES (p));
#endif
#endif
          /* Look at all insns we emitted, just to be safe.  */
          for (p = get_insns (); p; p = NEXT_INSN (p))
            if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
              {
                /* If this output reload doesn't come from a spill reg,
                   clear any memory of reloaded copies of the pseudo reg.
                   If this output reload comes from a spill reg,
                   reg_has_output_reload will make this do nothing.  */
                note_stores (PATTERN (p), forget_old_reloads_1);

                if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
                    && reload_spill_index[j] >= 0)
                  new_spill_reg_store[reload_spill_index[j]] = p;
              }

          output_reload_insns[reload_opnum[j]] = get_insns ();
          end_sequence ();
        }
    }
  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
     the RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
     the RELOAD_FOR_OUTPUT reload for that operand.  */

  emit_insns_before (other_input_address_reload_insns, before_insn);
  emit_insns_before (other_input_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (input_address_reload_insns[j], before_insn);
      emit_insns_before (input_reload_insns[j], before_insn);
    }

  emit_insns_before (other_operand_reload_insns, before_insn);
  emit_insns_before (operand_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (output_address_reload_insns[j], following_insn);
      emit_insns_before (output_reload_insns[j], following_insn);
    }

  /* Move death notes from INSN
     to output-operand-address and output reload insns.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
  {
    rtx insn1;
    /* Loop over those insns, last ones first.  */
    for (insn1 = PREV_INSN (following_insn); insn1 != insn;
         insn1 = PREV_INSN (insn1))
      if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
        {
          rtx source = SET_SRC (PATTERN (insn1));
          rtx dest = SET_DEST (PATTERN (insn1));

          /* The note we will examine next.  */
          rtx reg_notes = REG_NOTES (insn);
          /* The place that pointed to this note.  */
          rtx *prev_reg_note = &REG_NOTES (insn);

          /* If the note is for something used in the source of this
             reload insn, or in the output address, move the note.  */
          while (reg_notes)
            {
              rtx next_reg_notes = XEXP (reg_notes, 1);
              if (REG_NOTE_KIND (reg_notes) == REG_DEAD
                  && GET_CODE (XEXP (reg_notes, 0)) == REG
                  && ((GET_CODE (dest) != REG
                       && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
                                                              dest))
                      || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
                                                             source)))
                {
                  *prev_reg_note = next_reg_notes;
                  XEXP (reg_notes, 1) = REG_NOTES (insn1);
                  REG_NOTES (insn1) = reg_notes;
                }
              else
                prev_reg_note = &XEXP (reg_notes, 1);

              reg_notes = next_reg_notes;
            }
        }
  }
#endif
  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];
      register int i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
         If reload_reg_rtx[r] is 0, this is an optional reload
         that we opted to ignore.

         Also ignore reloads that don't reach the end of the insn,
         since we will eventually see the one that does.  */

      if (i >= 0 && reload_reg_rtx[r] != 0
          && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
                                       reload_when_needed[r]))
        {
          /* First, clear out memory of what used to be in this spill reg.
             If consecutive registers are used, clear them all.  */

          int nr
            = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
          int k;

          for (k = 0; k < nr; k++)
            {
              reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
              reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
            }

          /* Maybe the spill reg contains a copy of reload_out.  */
          if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
            {
              register int nregno = REGNO (reload_out[r]);
              int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
                         : HARD_REGNO_NREGS (nregno,
                                             GET_MODE (reload_reg_rtx[r])));

              spill_reg_store[i] = new_spill_reg_store[i];
              reg_last_reload_reg[nregno] = reload_reg_rtx[r];

              /* If NREGNO is a hard register, it may occupy more than
                 one register.  If it does, say what is in the
                 rest of the registers assuming that both registers
                 agree on how many words the object takes.  If not,
                 invalidate the subsequent registers.  */

              if (nregno < FIRST_PSEUDO_REGISTER)
                for (k = 1; k < nnr; k++)
                  reg_last_reload_reg[nregno + k]
                    = (nr == nnr ? gen_rtx (REG,
                                            reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
                                            REGNO (reload_reg_rtx[r]) + k)
                       : 0);

              /* Now do the inverse operation.  */
              for (k = 0; k < nr; k++)
                {
                  reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
                    = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
                       : nregno + k);
                  reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
                }
            }

          /* Maybe the spill reg contains a copy of reload_in.  Only do
             something if there will not be an output reload for
             the register being reloaded.  */
          else if (reload_out[r] == 0
                   && reload_in[r] != 0
                   && ((GET_CODE (reload_in[r]) == REG
                        && ! reg_has_output_reload[REGNO (reload_in[r])])
                       || (GET_CODE (reload_in_reg[r]) == REG
                           && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
            {
              register int nregno;
              int nnr;

              if (GET_CODE (reload_in[r]) == REG)
                nregno = REGNO (reload_in[r]);
              else
                nregno = REGNO (reload_in_reg[r]);

              nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
                     : HARD_REGNO_NREGS (nregno,
                                         GET_MODE (reload_reg_rtx[r])));

              reg_last_reload_reg[nregno] = reload_reg_rtx[r];

              if (nregno < FIRST_PSEUDO_REGISTER)
                for (k = 1; k < nnr; k++)
                  reg_last_reload_reg[nregno + k]
                    = (nr == nnr ? gen_rtx (REG,
                                            reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
                                            REGNO (reload_reg_rtx[r]) + k)
                       : 0);

              /* Unless we inherited this reload, show we haven't
                 recently done a store.  */
              if (! reload_inherited[r])
                spill_reg_store[i] = 0;

              for (k = 0; k < nr; k++)
                {
                  reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
                    = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
                       : nregno + k);
                  reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
                    = insn;
                }
            }
        }

      /* The following if-statement was #if 0'd in 1.34 (or before...).
         It's reenabled in 1.35 because supposedly nothing else
         deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
         that invalidates any previous reloaded copy of it.
         But forget_old_reloads_1 won't get to see it, because
         it thinks only about the original insn.  So invalidate it here.  */
      if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
        {
          register int nregno = REGNO (reload_out[r]);
          int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));

          while (num_regs-- > 0)
            reg_last_reload_reg[nregno + num_regs] = 0;
        }
    }
}
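
/* Illustrative sketch (not GNU CC code) of the emission order documented
   in emit_reload_insns above: reload insns collected per category are
   spliced in front of the insn being reloaded in a fixed order, and
   output reloads go after it.  The strings are stand-ins for the real
   insn chains.  */

static char *sketch_reload_emit_order[] =
{
  "before: RELOAD_FOR_OTHER_ADDRESS",
  "before: RELOAD_OTHER",
  "before: RELOAD_FOR_INPUT_ADDRESS then RELOAD_FOR_INPUT, per operand",
  "before: RELOAD_FOR_OPADDR_ADDR",
  "before: RELOAD_FOR_OPERAND_ADDRESS",
  "the insn being reloaded",
  "after: RELOAD_FOR_OUTPUT_ADDRESS then RELOAD_FOR_OUTPUT, per operand",
  0
};

/* Return the description of emission slot I, or 0 past the end.  */

static char *
sketch_reload_emit_slot (i)
     int i;
{
  int n;
  for (n = 0; sketch_reload_emit_order[n]; n++)
    ;
  return (i >= 0 && i < n) ? sketch_reload_emit_order[i] : 0;
}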
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  rtx last = get_last_insn ();

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
          || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
          || CONSTANT_P (XEXP (in, 1))
          || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
         register, constant, or MEM, and put it into the reload
         register.  The best possible way of doing this is if the machine
         has a three-operand ADD insn that accepts the required operands.

         The simplest approach is to try to generate such an insn and see if it
         is recognized and matches its constraints.  If so, it can be used.

         It might be better not to actually emit the insn unless it is valid,
         but we need to pass the insn as an operand to `recog' and
         `insn_extract' and it is simpler to emit and then delete the insn if
         not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
         checked, so we need to do that here to avoid spurious failure
         if the add instruction is two-address and the second operand
         of the add is the same as the reload reg, which is frequently
         the case.  If the insn would be A = B + A, rearrange it so
         it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
          && REGNO (out) == REGNO (XEXP (in, 1)))
        tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
        in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
        {
          insn_extract (insn);
          /* We want constrain_operands to treat this insn strictly in
             its validity determination, i.e., the way it would after reload
             has completed.  */
          if (constrain_operands (code, 1))
            return insn;
        }

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
         Use move to copy constant, MEM, or pseudo register to the reload
         register since "move" will be able to handle an arbitrary operand,
         unlike add which can't, in general.  Then add the registers.

         If there is another way to do this for a specific machine, a
         DEFINE_PEEPHOLE should be specified that recognizes the sequence
         we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
          || (GET_CODE (op1) == REG
              && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
        tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (out, op0));

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
         This fixes a problem on the 32K where the stack pointer cannot
         be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
        op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
         Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
        {
          insn_extract (insn);
          /* We want constrain_operands to treat this insn strictly in
             its validity determination, i.e., the way it would after reload
             has completed.  */
          if (constrain_operands (code, 1))
            return insn;
        }

      delete_insns_since (last);

      emit_insn (gen_move_insn (out, op1));
      emit_insn (gen_add2_insn (out, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
           && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
           && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
                                       REGNO_REG_CLASS (REGNO (out)),
                                       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
        out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
        in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (out, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
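
/* Illustrative sketch (not GNU CC code) of the strategy gen_reload uses
   for the PLUS case: try the single three-operand add first and fall
   back to a move followed by a two-operand add when the (hypothetical)
   target rejects it.  The string descriptions and the stub target test
   are stand-ins invented for this example.  */

static int
sketch_target_has_add3 ()
{
  return 0;	/* pretend the target lacks a three-operand add */
}

/* Fill SEQ with a description of the insns gen_reload would emit for
   OUT = OP0 + OP1 and return how many there are.  */

static int
sketch_reload_plus (seq)
     char **seq;
{
  if (sketch_target_has_add3 ())
    {
      seq[0] = "out = op0 + op1";	/* one three-operand add insn */
      return 1;
    }

  /* Conservative fallback: copy one operand, then a two-operand add.  */
  seq[0] = "out = op0";
  seq[1] = "out = out + op1";
  return 2;
}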
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
        return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
          && reg_mentioned_p (reg, PATTERN (i1)))
        return;
    }

  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
           && reg_basic_block[REGNO (reg)] >= 0
           && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
         and the beginning of the current basic block.
         (We also know that the last use before INSN was
         the output reload we are thinking of deleting, but never mind that.)
         Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
        {
          rtx set = single_set (i2);

          /* Uses which just store in the pseudo don't count,
             since if they are the only uses, they are dead.  */
          if (set != 0 && SET_DEST (set) == reg)
            continue;
          if (GET_CODE (i2) == CODE_LABEL
              || GET_CODE (i2) == JUMP_INSN)
            break;
          if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
              && reg_mentioned_p (reg, PATTERN (i2)))
            /* Some other ref remains;
               we can't do anything.  */
            return;
        }

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
        {
          rtx set = single_set (i2);

          if (set != 0 && SET_DEST (set) == reg)
            delete_insn (i2);
          if (GET_CODE (i2) == CODE_LABEL
              || GET_CODE (i2) == JUMP_INSN)
            break;
        }

      /* For the debugging info,
         say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
                                 gen_rtx (PLUS, GET_MODE (incloc),
                                          incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      if (constrain_operands (code, 1))
        {
          /* If this is a pre-increment and we have incremented the value
             where it lives, copy the incremented value to RELOADREG to
             be used as an address.  */

          if (! post)
            emit_insn (gen_move_insn (reloadreg, incloc));

          return;
        }
    }

  delete_insns_since (last);

  /* If we couldn't do the increment directly, we must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
         Because this might be a jump insn or a compare, and because RELOADREG
         may not be available after the insn in an input reload, we must do
         the incrementation before the insn being reloaded for.

         We have already copied INCLOC to RELOADREG.  Increment the copy in
         RELOADREG, save that back, then decrement RELOADREG so it has
         the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
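
/* Illustrative sketch (not GNU CC code) of the fallback sequences above,
   written against plain integers: after reloading an autoincrement
   address, the location must hold its incremented value and the reload
   register must hold the address the insn is supposed to use (the new
   value for a pre-increment, the original value for a post-increment).  */

static void
sketch_inc_for_reload (reloadreg, incloc, inc_amount, post)
     int *reloadreg, *incloc;
     int inc_amount, post;
{
  if (! post)
    {
      /* Pre-increment: the insn wants the incremented value.  */
      *reloadreg = *incloc + inc_amount;
      *incloc = *reloadreg;
    }
  else
    {
      /* Post-increment: the insn wants the original value,
         but the location must end up incremented.  */
      *reloadreg = *incloc;
      *incloc = *reloadreg + inc_amount;
    }
}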
/* Return 1 if we are certain that the constraint-string STRING allows
   the hard register REG.  Return 0 if we can't be sure of this.  */

static int
constraint_accepts_reg_p (string, reg)
     char *string;
     rtx reg;
{
  int value = 0;
  int regno = true_regnum (reg);
  int c;

  /* Initialize for first alternative.  */
  value = 0;
  /* Check that each alternative contains `g' or `r'.  */
  while (1)
    switch (c = *string++)
      {
      case 0:
        /* If an alternative lacks `g' or `r', we lose.  */
        return value;
      case ',':
        /* If an alternative lacks `g' or `r', we lose.  */
        if (value == 0)
          return 0;
        /* Initialize for next alternative.  */
        value = 0;
        break;
      case 'g':
      case 'r':
        /* Any general reg wins for this alternative.  */
        if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
          value = 1;
        break;
      default:
        /* Any reg in specified class wins for this alternative.  */
        {
          enum reg_class class = REG_CLASS_FROM_LETTER (c);

          if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
            value = 1;
        }
      }
}
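
/* Illustrative sketch (not GNU CC code) of the scan above: walk a
   comma-separated constraint string alternative by alternative and
   report whether every alternative contains at least one letter from
   ACCEPTED.  The ACCEPTED string is a stand-in for the class tests the
   real function performs.  */

static int
sketch_every_alternative_has (string, accepted)
     char *string, *accepted;
{
  int seen = 0;
  char c;

  while (1)
    switch (c = *string++)
      {
      case 0:
        return seen;            /* result for the final alternative */
      case ',':
        if (! seen)
          return 0;             /* this alternative lacked every letter */
        seen = 0;               /* start the next alternative */
        break;
      default:
        {
          char *p;
          for (p = accepted; *p; p++)
            if (*p == c)
              seen = 1;
        }
      }
}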
/* Return the number of places FIND appears within X, but don't count
   an occurrence if some SET_DEST is FIND.  */

static int
count_occurrences (x, find)
     register rtx x, find;
{
  register int i, j;
  register enum rtx_code code;
  register char *format_ptr;
  int count;

  if (x == find)
    return 1;
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case SET:
      if (SET_DEST (x) == find)
        return count_occurrences (SET_SRC (x), find);
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              for (j = 0; j < XVECLEN (x, i); j++)
                count += count_occurrences (XVECEXP (x, i, j), find);