/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "basic-block.h"
/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, creating additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
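/* In outline (an informal sketch of what follows, not a precise
   description of the control flow):

       repeat
	 scan every insn, using find_reloads to record what it needs;
	 if some class of reload regs is short, spill more hard regs
	   (reallocating or stack-slotting the pseudos that lived there);
       until a pass spills no additional hard regs;
       reload_as_needed () then rewrites the insns, copying values
       into and out of the chosen reload registers.  */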
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];
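/* In other words the invariant is, roughly,
       spill_reg_order[spill_regs[i]] == i	for 0 <= i < n_spills
       spill_reg_order[r] == -1			for every other hard reg R
   (an illustrative restatement of the comment above).  */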
/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This reg set indicates those registers that have been used as spill
   registers.  This information is used in reorg.c, to help figure out
   what registers are live at any point.  It is assumed that all spill_regs
   are dead at every CODE_LABEL.  */
HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */
char *basic_block_needs[N_REG_CLASSES];
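/* For example, the tests later in this file are of the form
   basic_block_needs[(int) class][block]; the outer index is the register
   class, the inner index the basic block number.  */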
/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* The register class to use for a base register when reloading an
   address.  This is normally BASE_REG_CLASS, but it may be different
   when using SMALL_REGISTER_CLASSES and passing parameters in
   registers.  */
enum reg_class reload_address_base_reg_class;

/* The register class to use for an index register when reloading an
   address.  This is normally INDEX_REG_CLASS, but it may be different
   when using SMALL_REGISTER_CLASSES and passing parameters in
   registers.  */
enum reg_class reload_address_index_reg_class;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* Allocation number table from global register allocation.  */
extern int *reg_allocno;
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
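/* With the default table above, for instance, there is a single entry
   saying that FRAME_POINTER_REGNUM may be replaced by STACK_POINTER_REGNUM;
   its initial_offset is filled in from INITIAL_ELIMINATION_OFFSET (or
   INITIAL_FRAME_POINTER_OFFSET) at the start of each pass over the insns.  */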
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */
static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */
static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };
static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *, int));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void order_regs_for_reload	PROTO((int));
static int compare_spill_regs		PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_free_for_value_p	PROTO((int, int, enum reload_type, rtx, rtx, int));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static void reload_cse_invalidate_regno	PROTO((int, enum machine_mode, int));
static int reload_cse_mem_conflict_p	PROTO((rtx, rtx));
static void reload_cse_invalidate_mem	PROTO((rtx));
static void reload_cse_invalidate_rtx	PROTO((rtx, rtx));
static int reload_cse_regno_equal_p	PROTO((int, rtx, enum machine_mode));
static int reload_cse_noop_set_p	PROTO((rtx, rtx));
static int reload_cse_simplify_set	PROTO((rtx, rtx));
static int reload_cse_simplify_operands	PROTO((rtx));
static void reload_cse_check_clobber	PROTO((rtx, rtx));
static void reload_cse_record_set	PROTO((rtx, rtx));
static void reload_cse_delete_death_notes	PROTO((rtx));
static void reload_cse_no_longer_dead	PROTO((int, enum machine_mode));
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
				 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
  /* Decide which register class should be used when reloading
     addresses.  If we are using SMALL_REGISTER_CLASSES, and any
     parameters are passed in registers, then we do not want to use
     those registers when reloading an address.  Otherwise, if a
     function argument needs a reload, we may wind up clobbering
     another argument to the function which was already computed.  If
     we find a subset class which simply avoids those registers, we
     use it instead.  ??? It would be better to only use the
     restricted class when we actually are loading function arguments,
     but that is hard to determine.  */
  reload_address_base_reg_class = BASE_REG_CLASS;
  reload_address_index_reg_class = INDEX_REG_CLASS;
  if (SMALL_REGISTER_CLASSES)
    {
      int regno;
      HARD_REG_SET base, index;
      enum reg_class *p;

      COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
      COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    CLEAR_HARD_REG_BIT (base, regno);
	    CLEAR_HARD_REG_BIT (index, regno);
	  }

      GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
			    baseok);
      for (p = reg_class_subclasses[BASE_REG_CLASS];
	   *p != LIM_REG_CLASSES;
	   p++)
	{
	  GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
	  continue;
	usebase:
	  reload_address_base_reg_class = *p;
	  break;
	}
    baseok:;

      GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
			    indexok);
      for (p = reg_class_subclasses[INDEX_REG_CLASS];
	   *p != LIM_REG_CLASSES;
	   p++)
	{
	  GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
	  continue;
	useindex:
	  reload_address_index_reg_class = *p;
	  break;
	}
    indexok:;
    }
}
/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */
int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i, j, k;
  register rtx insn;
  register struct elim_table *ep;

  /* The two pointers used to track the true location of the memory used
     for label offsets.  */
  char *real_known_ptr = NULL_PTR;
  int (*real_at_ptr)[NUM_ELIMINABLE_REGS];

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();
  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
  bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);
  /* A function that receives a nonlocal goto must save all call-saved
     registers.  */
  if (current_function_has_nonlocal_label)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      {
	if (! call_used_regs[i] && ! fixed_regs[i])
	  regs_ever_live[i] = 1;
      }

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */
  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));

  if (SMALL_REGISTER_CLASSES)
    CLEAR_HARD_REG_SET (forbidden_regs);

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.
     Also look for a "constant" NOTE_INSN_SETJMP.  This means that all
     caller-saved registers must be marked live.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	  if (! call_used_regs[i])
	    regs_ever_live[i] = 1;

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
		  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
	      )
	    {
	      rtx x = XEXP (note, 0);
	      i = REGNO (SET_DEST (set));
	      if (i > LAST_VIRTUAL_REGISTER)
		{
		  if (GET_CODE (x) == MEM)
		    {
		      /* If the operand is a PLUS, the MEM may be shared,
			 so make sure we have an unshared copy here.  */
		      if (GET_CODE (XEXP (x, 0)) == PLUS)
			x = copy_rtx (x);

		      reg_equiv_memory_loc[i] = x;
		    }
		  else if (CONSTANT_P (x))
		    {
		      if (LEGITIMATE_CONSTANT_P (x))
			reg_equiv_constant[i] = x;
		      else
			reg_equiv_memory_loc[i]
			  = force_const_mem (GET_MODE (SET_DEST (set)), x);
		    }
		  else
		    continue;

		  /* If this register is being made equivalent to a MEM
		     and the MEM is not SET_SRC, the equivalencing insn
		     is one with the MEM as a SET_DEST and it occurs later.
		     So don't mark this insn now.  */
		  if (GET_CODE (x) != MEM
		      || rtx_equal_p (SET_SRC (set), x))
		    reg_equiv_init[i] = insn;
		}
	    }
	}

      /* If this insn is setting a MEM from a register equivalent to it,
	 this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
	       && GET_CODE (SET_SRC (set)) == REG
	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
	       && rtx_equal_p (SET_DEST (set),
			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
	reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	scan_paradoxical_subregs (PATTERN (insn));
    }
  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);
  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
	= (CAN_ELIMINATE (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();
  /* Allocate the tables used to store offset information at labels.  */
  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause a core dump.  */
  real_known_ptr = xmalloc (num_labels);
  real_at_ptr
    = (int (*)[NUM_ELIMINABLE_REGS])
      xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at = real_known_ptr - get_first_label_num ();
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);
  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);
#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    {
      free (real_known_ptr);
      free (real_at_ptr);
      return 0;
    }
#endif
  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload (global);

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

  if (! SMALL_REGISTER_CLASSES)
    COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      spill_hard_reg (ep->from, global, dumpfile, 1);

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
#endif

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
	basic_block_needs[i] = (char *) alloca (n_basic_blocks);
	bzero (basic_block_needs[i], n_basic_blocks);
      }
  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      /* For each class, number of reload regs needed in that class.
	 This is the maximum over all insns of the needs in that class
	 of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
	 that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
	 (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
	 to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
	 groups of regs of that class.
	 If two different modes ever require groups of one class,
	 they must be the same size and equally restrictive for that class,
	 otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      HOST_WIDE_INT starting_frame_size;
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      int previous_frame_pointer_needed = frame_pointer_needed;
#endif
      static char *reg_class_names[] = REG_CLASS_NAMES;
      something_changed = 0;
      bzero ((char *) max_needs, sizeof max_needs);
      bzero ((char *) max_groups, sizeof max_groups);
      bzero ((char *) max_nongroups, sizeof max_nongroups);
      bzero ((char *) max_needs_insn, sizeof max_needs_insn);
      bzero ((char *) max_groups_insn, sizeof max_groups_insn);
      bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
      bzero ((char *) group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
	group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
	 changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;

      /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done
	 here because the stack size may be a part of the offset computation
	 for register elimination, and there might have been new stack slots
	 created in the last iteration of this loop.  */
      assign_stack_local (BLKmode, 0, 0);

      starting_frame_size = get_frame_size ();
      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
	  ep->previous_offset = ep->offset
	    = ep->max_offset = ep->initial_offset;
	}
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
	abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
	= reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif
      num_not_at_initial_offset = 0;

      bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
	 of each elimination.  We do this because we assume that all
	 computed jumps occur from a location where each elimination is
	 at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
	if (XEXP (x, 0))
	  set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range, in that case record the address; we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
					 XEXP (x, 0)))
	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (GET_CODE (XEXP (x, 0)) == REG
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc[i] = 0;
		reg_equiv_init[i] = 0;
		alter_reg (i, -1);
		something_changed = 1;
	      }
	  }
      /* If we allocated another pseudo to the stack, redo elimination
	 bookkeeping.  */
      if (something_changed)
	continue;

      /* If caller-saves needs a group, initialize the group to include
	 the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
	{
	  group_mode[(int) caller_save_spill_class] = Pmode;
	  group_size[(int) caller_save_spill_class] = caller_save_group_size;
	}
      /* Compute the most additional registers needed by any instruction.
	 Collect information separately for each class of regs.  */

      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (global && this_block + 1 < n_basic_blocks
	      && insn == basic_block_head[this_block+1])
	    ++this_block;

	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
	     might include REG_LABEL), we need to see what effects this
	     has on the known offsets at labels.  */

	  if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
	      || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      /* Nonzero means don't use a reload reg that overlaps
		 the place where a function value can be returned.  */
	      rtx avoid_return_reg = 0;

	      rtx old_body = PATTERN (insn);
	      int old_code = INSN_CODE (insn);
	      rtx old_notes = REG_NOTES (insn);
	      int did_elimination = 0;

	      /* To compute the number of reload registers of each class
		 needed for an insn, we must simulate what choose_reload_regs
		 can do.  We do this by splitting an insn into an "input" and
		 an "output" part.  RELOAD_OTHER reloads are used in both.
		 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
		 which must be live over the entire input section of reloads,
		 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
		 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
		 inputs.

		 The registers needed for output are RELOAD_OTHER and
		 RELOAD_FOR_OUTPUT, which are live for the entire output
		 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
		 reloads for each operand.

		 The total number of registers needed is the maximum of the
		 inputs and outputs.  */
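	      /* As an illustration (the numbers are invented for the
		 example): an insn with two RELOAD_FOR_INPUT reloads and one
		 RELOAD_FOR_OUTPUT reload of the same class needs
		 MAX (2, 1) = 2 registers of that class, because the input
		 and output parts of the insn do not overlap; a RELOAD_OTHER
		 reload of that class is live across both parts and is
		 therefore added on top of that maximum.  */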
	      struct needs
		{
		  /* [0] is normal, [1] is nongroup.  */
		  int regs[2][N_REG_CLASSES];
		  int groups[N_REG_CLASSES];
		};

	      /* Each `struct needs' corresponds to one RELOAD_... type.  */
	      struct {
		struct needs other;
		struct needs input;
		struct needs output;
		struct needs insn;
		struct needs other_addr;
		struct needs op_addr;
		struct needs op_addr_reload;
		struct needs in_addr[MAX_RECOG_OPERANDS];
		struct needs in_addr_addr[MAX_RECOG_OPERANDS];
		struct needs out_addr[MAX_RECOG_OPERANDS];
		struct needs out_addr_addr[MAX_RECOG_OPERANDS];
	      } insn_needs;

	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable)
		did_elimination = eliminate_regs_in_insn (insn, 0);
	      /* Set avoid_return_reg if this is an insn
		 that might use the value of a function call.  */
	      if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
		{
		  if (GET_CODE (PATTERN (insn)) == SET)
		    after_call = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    after_call = 0;
		}
	      else if (SMALL_REGISTER_CLASSES && after_call != 0
		       && !(GET_CODE (PATTERN (insn)) == SET
			    && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
		       && GET_CODE (PATTERN (insn)) != USE)
		{
		  if (reg_referenced_p (after_call, PATTERN (insn)))
		    avoid_return_reg = after_call;
		  after_call = 0;
		}

	      /* Analyze the instruction.  */
	      find_reloads (insn, 0, spill_indirect_levels, global,
			    spill_reg_order);
	      /* Remember for later shortcuts which insns had any reloads or
		 register eliminations.

		 One might think that it would be worthwhile to mark insns
		 that need register replacements but not reloads, but this is
		 not safe because find_reloads may do some manipulation of
		 the insn (such as swapping commutative operands), which would
		 be lost when we restore the old pattern after register
		 replacement.  So the actions of find_reloads must be redone in
		 subsequent passes or in reload_as_needed.

		 However, it is safe to mark insns that need reloads
		 but not register replacement.  */

	      PUT_MODE (insn, (did_elimination ? QImode
			       : n_reloads ? HImode
			       : GET_MODE (insn) == DImode ? DImode
			       : VOIDmode));
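	      /* The mode assigned just above serves only as a per-insn flag
		 for the later shortcut tests: QImode marks an insn that
		 needed register elimination, HImode one that needs reloads,
		 a pre-existing DImode marking is kept, and VOIDmode means
		 neither applies.  */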
	      /* Discard any register replacements done.  */
	      if (did_elimination)
		{
		  obstack_free (&reload_obstack, reload_firstobj);
		  PATTERN (insn) = old_body;
		  INSN_CODE (insn) = old_code;
		  REG_NOTES (insn) = old_notes;
		  something_needs_elimination = 1;
		}

	      /* If this insn has no reloads, we need not do anything except
		 in the case of a CALL_INSN when we have caller-saves and
		 caller-save needs reloads.  */

	      if (n_reloads == 0
		  && ! (GET_CODE (insn) == CALL_INSN
			&& caller_save_spill_class != NO_REGS))
		continue;

	      something_needs_reloads = 1;
	      bzero ((char *) &insn_needs, sizeof insn_needs);
	      /* Count each reload once in every class
		 containing the reload's own class.  */

	      for (i = 0; i < n_reloads; i++)
		{
		  register enum reg_class *p;
		  enum reg_class class = reload_reg_class[i];
		  int size;
		  enum machine_mode mode;
		  struct needs *this_needs;

		  /* Don't count the dummy reloads, for which one of the
		     regs mentioned in the insn can be used for reloading.
		     Don't count optional reloads.
		     Don't count reloads that got combined with others.  */
		  if (reload_reg_rtx[i] != 0
		      || reload_optional[i] != 0
		      || (reload_out[i] == 0 && reload_in[i] == 0
			  && ! reload_secondary_p[i]))
		    continue;

		  /* Show that a reload register of this class is needed
		     in this basic block.  We do not use insn_needs and
		     insn_groups because they are overly conservative for
		     this purpose.  */
		  if (global && ! basic_block_needs[(int) class][this_block])
		    {
		      basic_block_needs[(int) class][this_block] = 1;
		      new_basic_block_needs = 1;
		    }

		  mode = reload_inmode[i];
		  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
		    mode = reload_outmode[i];
		  size = CLASS_MAX_NREGS (class, mode);
		  /* Decide which time-of-use to count this reload for.  */
		  switch (reload_when_needed[i])
		    {
		    case RELOAD_OTHER:
		      this_needs = &insn_needs.other;
		      break;
		    case RELOAD_FOR_INPUT:
		      this_needs = &insn_needs.input;
		      break;
		    case RELOAD_FOR_OUTPUT:
		      this_needs = &insn_needs.output;
		      break;
		    case RELOAD_FOR_INSN:
		      this_needs = &insn_needs.insn;
		      break;
		    case RELOAD_FOR_OTHER_ADDRESS:
		      this_needs = &insn_needs.other_addr;
		      break;
		    case RELOAD_FOR_INPUT_ADDRESS:
		      this_needs = &insn_needs.in_addr[reload_opnum[i]];
		      break;
		    case RELOAD_FOR_INPADDR_ADDRESS:
		      this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
		      break;
		    case RELOAD_FOR_OUTPUT_ADDRESS:
		      this_needs = &insn_needs.out_addr[reload_opnum[i]];
		      break;
		    case RELOAD_FOR_OUTADDR_ADDRESS:
		      this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
		      break;
		    case RELOAD_FOR_OPERAND_ADDRESS:
		      this_needs = &insn_needs.op_addr;
		      break;
		    case RELOAD_FOR_OPADDR_ADDR:
		      this_needs = &insn_needs.op_addr_reload;
		      break;
		    }
, allocate_mode
;
1240 /* Count number of groups needed separately from
1241 number of individual regs needed. */
1242 this_needs
->groups
[(int) class]++;
1243 p
= reg_class_superclasses
[(int) class];
1244 while (*p
!= LIM_REG_CLASSES
)
1245 this_needs
->groups
[(int) *p
++]++;
1247 /* Record size and mode of a group of this class. */
1248 /* If more than one size group is needed,
1249 make all groups the largest needed size. */
1250 if (group_size
[(int) class] < size
)
1252 other_mode
= group_mode
[(int) class];
1253 allocate_mode
= mode
;
1255 group_size
[(int) class] = size
;
1256 group_mode
[(int) class] = mode
;
1261 allocate_mode
= group_mode
[(int) class];
1264 /* Crash if two dissimilar machine modes both need
1265 groups of consecutive regs of the same class. */
1267 if (other_mode
!= VOIDmode
&& other_mode
!= allocate_mode
1268 && ! modes_equiv_for_class_p (allocate_mode
,
1270 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1275 this_needs
->regs
[reload_nongroup
[i
]][(int) class] += 1;
1276 p
= reg_class_superclasses
[(int) class];
1277 while (*p
!= LIM_REG_CLASSES
)
1278 this_needs
->regs
[reload_nongroup
[i
]][(int) *p
++] += 1;
	      /* All reloads have been counted for this insn;
		 now merge the various times of use.
		 This sets insn_needs, etc., to the maximum total number
		 of registers needed at any point in this insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  int in_max, out_max;

		  /* Compute normal and nongroup needs.  */
		  for (j = 0; j <= 1; j++)
		    {
		      for (in_max = 0, out_max = 0, k = 0;
			   k < reload_n_operands; k++)
			{
			  in_max
			    = MAX (in_max,
				   (insn_needs.in_addr[k].regs[j][i]
				    + insn_needs.in_addr_addr[k].regs[j][i]));
			  out_max
			    = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
			  out_max
			    = MAX (out_max,
				   insn_needs.out_addr_addr[k].regs[j][i]);
			}

		      /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
			 and operand addresses but not things used to reload
			 them.  Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
			 don't conflict with things needed to reload inputs or
			 outputs.  */

		      in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
					 insn_needs.op_addr_reload.regs[j][i]),
				    in_max);

		      out_max = MAX (out_max, insn_needs.insn.regs[j][i]);

		      insn_needs.input.regs[j][i]
			= MAX (insn_needs.input.regs[j][i]
			       + insn_needs.op_addr.regs[j][i]
			       + insn_needs.insn.regs[j][i],
			       in_max + insn_needs.input.regs[j][i]);

		      insn_needs.output.regs[j][i] += out_max;
		      insn_needs.other.regs[j][i]
			+= MAX (MAX (insn_needs.input.regs[j][i],
				     insn_needs.output.regs[j][i]),
				insn_needs.other_addr.regs[j][i]);
		    }
		  /* Now compute group needs.  */
		  for (in_max = 0, out_max = 0, j = 0;
		       j < reload_n_operands; j++)
		    {
		      in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
		      in_max = MAX (in_max,
				    insn_needs.in_addr_addr[j].groups[i]);
		      out_max
			= MAX (out_max, insn_needs.out_addr[j].groups[i]);
		      out_max
			= MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
		    }

		  in_max = MAX (MAX (insn_needs.op_addr.groups[i],
				     insn_needs.op_addr_reload.groups[i]),
				in_max);
		  out_max = MAX (out_max, insn_needs.insn.groups[i]);

		  insn_needs.input.groups[i]
		    = MAX (insn_needs.input.groups[i]
			   + insn_needs.op_addr.groups[i]
			   + insn_needs.insn.groups[i],
			   in_max + insn_needs.input.groups[i]);

		  insn_needs.output.groups[i] += out_max;
		  insn_needs.other.groups[i]
		    += MAX (MAX (insn_needs.input.groups[i],
				 insn_needs.output.groups[i]),
			    insn_needs.other_addr.groups[i]);
		}
	      /* If this is a CALL_INSN and caller-saves will need
		 a spill register, act as if the spill register is
		 needed for this insn.   However, the spill register
		 can be used by any reload of this insn, so we only
		 need do something if no need for that class has
		 been recorded.

		 The assumption that every CALL_INSN will trigger a
		 caller-save is highly conservative, however, the number
		 of cases where caller-saves will need a spill register but
		 a block containing a CALL_INSN won't need a spill register
		 of that class should be quite rare.

		 If a group is needed, the size and mode of the group will
		 have been set up at the beginning of this loop.  */

	      if (GET_CODE (insn) == CALL_INSN
		  && caller_save_spill_class != NO_REGS)
		{
		  /* See if this register would conflict with any reload that
		     needs a group or any reload that needs a nongroup.  */
		  int nongroup_need = 0;
		  int *caller_save_needs;
		  for (j = 0; j < n_reloads; j++)
		    if (reg_classes_intersect_p (caller_save_spill_class,
						 reload_reg_class[j])
			&& ((CLASS_MAX_NREGS
			     (reload_reg_class[j],
			      (GET_MODE_SIZE (reload_outmode[j])
			       > GET_MODE_SIZE (reload_inmode[j]))
			      ? reload_outmode[j] : reload_inmode[j])
			     > 1)
			    || reload_nongroup[j]))
		      {
			nongroup_need = 1;
			break;
		      }

		  caller_save_needs
		    = (caller_save_group_size > 1
		       ? insn_needs.other.groups
		       : insn_needs.other.regs[nongroup_need]);

		  if (caller_save_needs[(int) caller_save_spill_class] == 0)
		    {
		      register enum reg_class *p
			= reg_class_superclasses[(int) caller_save_spill_class];

		      caller_save_needs[(int) caller_save_spill_class]++;

		      while (*p != LIM_REG_CLASSES)
			caller_save_needs[(int) *p++] += 1;
		    }

		  /* Show that this basic block will need a register of
		     this class.  */

		  if (global
		      && ! (basic_block_needs[(int) caller_save_spill_class]
			    [this_block]))
		    {
		      basic_block_needs[(int) caller_save_spill_class]
			[this_block] = 1;
		      new_basic_block_needs = 1;
		    }
		}
	      /* If this insn stores the value of a function call,
		 and that value is in a register that has been spilled,
		 and if the insn needs a reload in a class
		 that might use that register as the reload register,
		 then add an extra need in that class.
		 This makes sure we have a register available that does
		 not overlap the return value.  */

	      if (SMALL_REGISTER_CLASSES && avoid_return_reg)
		{
		  int regno = REGNO (avoid_return_reg);
		  int nregs
		    = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
		  int r;
		  int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];

		  /* First compute the "basic needs", which counts a
		     need only in the smallest class in which it
		     is required.  */

		  bcopy ((char *) insn_needs.other.regs[0],
			 (char *) basic_needs, sizeof basic_needs);
		  bcopy ((char *) insn_needs.other.groups,
			 (char *) basic_groups, sizeof basic_groups);

		  for (i = 0; i < N_REG_CLASSES; i++)
		    {
		      enum reg_class *p;

		      if (basic_needs[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_needs[(int) *p] -= basic_needs[i];

		      if (basic_groups[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_groups[(int) *p] -= basic_groups[i];
		    }
		  /* Now count extra regs if there might be a conflict with
		     the return value register.  */

		  for (r = regno; r < regno + nregs; r++)
		    if (spill_reg_order[r] >= 0)
		      for (i = 0; i < N_REG_CLASSES; i++)
			if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
			  {
			    if (basic_needs[i] > 0)
			      {
				enum reg_class *p;

				insn_needs.other.regs[0][i]++;
				p = reg_class_superclasses[i];
				while (*p != LIM_REG_CLASSES)
				  insn_needs.other.regs[0][(int) *p++]++;
			      }
			    if (basic_groups[i] > 0)
			      {
				enum reg_class *p;

				insn_needs.other.groups[i]++;
				p = reg_class_superclasses[i];
				while (*p != LIM_REG_CLASSES)
				  insn_needs.other.groups[(int) *p++]++;
			      }
			  }
		}
	      /* For each class, collect maximum need of any insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  if (max_needs[i] < insn_needs.other.regs[0][i])
		    {
		      max_needs[i] = insn_needs.other.regs[0][i];
		      max_needs_insn[i] = insn;
		    }
		  if (max_groups[i] < insn_needs.other.groups[i])
		    {
		      max_groups[i] = insn_needs.other.groups[i];
		      max_groups_insn[i] = insn;
		    }
		  if (max_nongroups[i] < insn_needs.other.regs[1][i])
		    {
		      max_nongroups[i] = insn_needs.other.regs[1][i];
		      max_nongroups_insn[i] = insn;
		    }
		}
	    }
	  /* Note that there is a continue statement above.  */
	}

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
	something_changed = 1;
      if (dumpfile)
	for (i = 0; i < N_REG_CLASSES; i++)
	  {
	    if (max_needs[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d reg%s of class %s (for insn %d).\n",
		       max_needs[i], max_needs[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_needs_insn[i]));
	    if (max_nongroups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
		       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
	    if (max_groups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
		       max_groups[i], max_groups[i] == 1 ? "" : "s",
		       mode_name[(int) group_mode[i]],
		       reg_class_names[i], INSN_UID (max_groups_insn[i]));
	  }
      /* If we have caller-saves, set up the save areas and see if caller-save
	 will need a spill register.  */

      if (caller_save_needed)
	{
	  /* Set the offsets for setup_save_areas.  */
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    ep->previous_offset = ep->max_offset;

	  if ( ! setup_save_areas (&something_changed)
	      && caller_save_spill_class == NO_REGS)
	    {
	      /* The class we will need depends on whether the machine
		 supports the sum of two registers for an address; see
		 find_address_reloads for details.  */

	      caller_save_spill_class
		= double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
	      caller_save_group_size
		= CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
	      something_changed = 1;
	    }
	}
      /* See if anything that happened changes which eliminations are valid.
	 For example, on the Sparc, whether or not the frame pointer can
	 be eliminated can depend on what registers have been used.  We need
	 not check some conditions again (such as flag_omit_frame_pointer)
	 since they can't have changed.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
	    || ! CAN_ELIMINATE (ep->from, ep->to)
#endif
	    )
	  ep->can_eliminate = 0;
      /* Look for the case where we have discovered that we can't replace
	 register A with register B and that means that we will now be
	 trying to replace register A with register C.  This means we can
	 no longer replace register C with register B and we need to disable
	 such an elimination, if it exists.  This occurs often with A == ap,
	 B == sp, and C == fp.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  struct elim_table *op;
	  register int new_to = -1;

	  if (! ep->can_eliminate && ep->can_eliminate_previous)
	    {
	      /* Find the current elimination for ep->from, if there is a
		 new one.  */
	      for (op = reg_eliminate;
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
		if (op->from == ep->from && op->can_eliminate)
		  {
		    new_to = op->to;
		    break;
		  }

	      /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
		 disable it.  */
	      for (op = reg_eliminate;
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
		if (op->from == new_to && op->to == ep->to)
		  op->can_eliminate = 0;
	    }
	}
      /* See if any registers that we thought we could eliminate the previous
	 time are no longer eliminable.  If so, something has changed and we
	 must spill the register.  Also, recompute the number of eliminable
	 registers and see if the frame pointer is needed; it is if there is
	 no elimination of the frame pointer that we can perform.  */

      frame_pointer_needed = 1;
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
	      && ep->to != HARD_FRAME_POINTER_REGNUM)
	    frame_pointer_needed = 0;

	  if (! ep->can_eliminate && ep->can_eliminate_previous)
	    {
	      ep->can_eliminate_previous = 0;
	      spill_hard_reg (ep->from, global, dumpfile, 1);
	      something_changed = 1;
	      num_eliminable--;
	    }
	}
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      /* If we didn't need a frame pointer last time, but we do now, spill
	 the hard frame pointer.  */
      if (frame_pointer_needed && ! previous_frame_pointer_needed)
	{
	  spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
	  something_changed = 1;
	}
#endif
      /* If all needs are met, we win.  */

      for (i = 0; i < N_REG_CLASSES; i++)
	if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
	  break;
      if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
	break;
      /* Not all needs are met; must spill some hard regs.  */

      /* Put all registers spilled so far back in potential_reload_regs, but
	 put them at the front, since we've already spilled most of the
	 pseudos in them (we might have left some pseudos unspilled if they
	 were in a block that didn't need any spill registers of a conflicting
	 class).  We used to try to mark off the need for those registers,
	 but doing so properly is very complex and reallocating them is the
	 simpler approach.  First, "pack" potential_reload_regs by pushing
	 any nonnegative entries towards the end.  That will leave room
	 for the registers we already spilled.

	 Also, undo the marking of the spill registers from the last time
	 around in FORBIDDEN_REGS since we will probably be allocating
	 them again.

	 ??? It is theoretically possible that we might end up not using one
	 of our previously-spilled registers in this allocation, even though
	 they are at the head of the list.  It's not clear what to do about
	 this, but it was no better before, when we marked off the needs met
	 by the previously-spilled registers.  With the current code, globals
	 can be allocated into these registers, but locals cannot.  */

      for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	if (potential_reload_regs[i] != -1)
	  potential_reload_regs[j--] = potential_reload_regs[i];

      for (i = 0; i < n_spills; i++)
	{
	  potential_reload_regs[i] = spill_regs[i];
	  spill_reg_order[spill_regs[i]] = -1;
	  CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
	}
      /* Now find more reload regs to satisfy the remaining need.
	 Do it by ascending class number, since otherwise a reg
	 might be spilled for a big class and might fail to count
	 for a smaller class even though it belongs to that class.

	 Count spilled regs in `spills', and add entries to
	 `spill_regs' and `spill_reg_order'.

	 ??? Note there is a problem here.
	 When there is a need for a group in a high-numbered class,
	 and also need for non-group regs that come from a lower class,
	 the non-group regs are chosen first.  If there aren't many regs,
	 they might leave no room for a group.

	 This was happening on the 386.  To fix it, we added the code
	 that calls possible_group_p, so that the lower class won't
	 break up the last possible group.

	 Really fixing the problem would require changes above
	 in counting the regs already spilled, and in choose_reload_regs.
	 It might be hard to avoid introducing bugs there.  */
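      /* Purely illustrative sketch of the problem above (hypothetical
	 register numbers): suppose the only adjacent pair still spillable
	 in a class that needs one group is (regs 2,3), while a lower
	 class also needs a single non-group reg and reg 2 happens to be
	 its most preferred choice.  Spilling reg 2 for the non-group need
	 first would leave no way to form the (2,3) group; the call to
	 possible_group_p below is what steers the non-group choice away
	 from reg 2 in that situation.  */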
      CLEAR_HARD_REG_SET (counted_for_groups);
      CLEAR_HARD_REG_SET (counted_for_nongroups);

      for (class = 0; class < N_REG_CLASSES; class++)
	{
	  /* First get the groups of registers.
	     If we got single registers first, we might fragment
	     possible groups.  */
	  while (max_groups[class] > 0)
	    {
	      /* If any single spilled regs happen to form groups,
		 count them now.  Maybe we don't really need
		 to spill another group.  */
	      count_possible_groups (group_size, group_mode, max_groups,
				     class);

	      if (max_groups[class] <= 0)
		break;

	      /* Groups of size 2 (the only groups used on most machines)
		 are treated specially.  */
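	      /* Illustrative note (hypothetical numbers): with a group
		 size of 2, the search below prefers to finish a
		 half-built pair; e.g. if reg 5 is already a spill reg
		 and reg 4 is acceptable in this class and mode, spilling
		 reg 4 yields the group (4,5) at the cost of one new
		 spill reg instead of two.  */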
	      if (group_size[class] == 2)
		{
		  /* First, look for a register that will complete a group.  */
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    {
		      int other;

		      j = potential_reload_regs[i];
		      if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
			  &&
			  ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], other)
			    && HARD_REGNO_MODE_OK (other, group_mode[class])
			    && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						    other)
			    /* We don't want one part of another group.
			       We could get "two groups" that overlap!  */
			    && ! TEST_HARD_REG_BIT (counted_for_groups, other))
			   ||
			   (j < FIRST_PSEUDO_REGISTER - 1
			    && (other = j + 1, spill_reg_order[other] >= 0)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], other)
			    && HARD_REGNO_MODE_OK (j, group_mode[class])
			    && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						    other)
			    && ! TEST_HARD_REG_BIT (counted_for_groups,
						    other))))
			{
			  register enum reg_class *p;

			  /* We have found one that will complete a group,
			     so count off one group as provided.  */
			  max_groups[class]--;
			  p = reg_class_superclasses[class];
			  while (*p != LIM_REG_CLASSES)
			    {
			      if (group_size[(int) *p] <= group_size[class])
				max_groups[(int) *p]--;
			      p++;
			    }

			  /* Indicate both these regs are part of a group.  */
			  SET_HARD_REG_BIT (counted_for_groups, j);
			  SET_HARD_REG_BIT (counted_for_groups, other);
			  break;
			}
		    }
		  /* We can't complete a group, so start one.  */
		  /* Look for a pair neither of which is explicitly used.  */
		  if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
		    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		      {
			int k;
			j = potential_reload_regs[i];
			/* Verify that J+1 is a potential reload reg.  */
			for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
			  if (potential_reload_regs[k] == j + 1)
			    break;
			if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
			    && k < FIRST_PSEUDO_REGISTER
			    && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
			    && HARD_REGNO_MODE_OK (j, group_mode[class])
			    && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						    j + 1)
			    && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
			    /* Reject J at this stage
			       if J+1 was explicitly used.  */
			    && ! regs_explicitly_used[j + 1])
			  break;
		      }
		  /* Now try any group at all
		     whose registers are not in bad_spill_regs.  */
		  if (i == FIRST_PSEUDO_REGISTER)
		    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		      {
			int k;
			j = potential_reload_regs[i];
			/* Verify that J+1 is a potential reload reg.  */
			for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
			  if (potential_reload_regs[k] == j + 1)
			    break;
			if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
			    && k < FIRST_PSEUDO_REGISTER
			    && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
			    && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
			    && HARD_REGNO_MODE_OK (j, group_mode[class])
			    && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						    j + 1)
			    && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
			  break;
		      }

		  /* I should be the index in potential_reload_regs
		     of the new reload reg we have found.  */

		  if (i >= FIRST_PSEUDO_REGISTER)
		    {
		      /* There are no groups left to spill.  */
		      spill_failure (max_groups_insn[class]);
		      failure = 1;
		      goto failed;
		    }
		  else
		    something_changed
		      |= new_spill_reg (i, class, max_needs, NULL_PTR,
					global, dumpfile);
		}
	      else
		{
		  /* For groups of more than 2 registers,
		     look for a sufficient sequence of unspilled registers,
		     and spill them all at once.  */
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    {
		      int k;

		      j = potential_reload_regs[i];
		      if (j >= 0
			  && j + group_size[class] <= FIRST_PSEUDO_REGISTER
			  && HARD_REGNO_MODE_OK (j, group_mode[class]))
			{
			  /* Check each reg in the sequence.  */
			  for (k = 0; k < group_size[class]; k++)
			    if (! (spill_reg_order[j + k] < 0
				   && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
				   && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
			      break;
			  /* We got a full sequence, so spill them all.  */
			  if (k == group_size[class])
			    {
			      register enum reg_class *p;
			      for (k = 0; k < group_size[class]; k++)
				{
				  int idx;
				  SET_HARD_REG_BIT (counted_for_groups, j + k);
				  for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
				    if (potential_reload_regs[idx] == j + k)
				      break;
				  something_changed
				    |= new_spill_reg (idx, class,
						      max_needs, NULL_PTR,
						      global, dumpfile);
				}

			      /* We have found one that will complete a group,
				 so count off one group as provided.  */
			      max_groups[class]--;
			      p = reg_class_superclasses[class];
			      while (*p != LIM_REG_CLASSES)
				{
				  if (group_size[(int) *p]
				      <= group_size[class])
				    max_groups[(int) *p]--;
				  p++;
				}

			      break;
			    }
			}
		    }
		  /* We couldn't find any registers for this reload.
		     Avoid going into an infinite loop.  */
		  if (i >= FIRST_PSEUDO_REGISTER)
		    {
		      /* There are no groups left.  */
		      spill_failure (max_groups_insn[class]);
		      failure = 1;
		      goto failed;
		    }
		}
	    }
	  /* Now similarly satisfy all need for single registers.  */

	  while (max_needs[class] > 0 || max_nongroups[class] > 0)
	    {
	      /* If we spilled enough regs, but they weren't counted
		 against the non-group need, see if we can count them now.
		 If so, we can avoid some actual spilling.  */
	      if (max_needs[class] <= 0 && max_nongroups[class] > 0)
		for (i = 0; i < n_spills; i++)
		  if (TEST_HARD_REG_BIT (reg_class_contents[class],
					 spill_regs[i])
		      && !TEST_HARD_REG_BIT (counted_for_groups,
					     spill_regs[i])
		      && !TEST_HARD_REG_BIT (counted_for_nongroups,
					     spill_regs[i])
		      && max_nongroups[class] > 0)
		    {
		      register enum reg_class *p;

		      SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
		      max_nongroups[class]--;
		      p = reg_class_superclasses[class];
		      while (*p != LIM_REG_CLASSES)
			max_nongroups[(int) *p++]--;
		    }

	      if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
		break;

	      /* Consider the potential reload regs that aren't
		 yet in use as reload regs, in order of preference.
		 Find the most preferred one that's in this class.  */

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (potential_reload_regs[i] >= 0
		    && TEST_HARD_REG_BIT (reg_class_contents[class],
					  potential_reload_regs[i])
		    /* If this reg will not be available for groups,
		       pick one that does not foreclose possible groups.
		       This is a kludge, and not very general,
		       but it should be sufficient to make the 386 work,
		       and the problem should not occur on machines with
		       more registers.  */
		    && (max_nongroups[class] == 0
			|| possible_group_p (potential_reload_regs[i], max_groups)))
		  break;

	      /* If we couldn't get a register, try to get one even if we
		 might foreclose possible groups.  This may cause problems
		 later, but that's better than aborting now, since it is
		 possible that we will, in fact, be able to form the needed
		 group even with this allocation.  */

	      if (i >= FIRST_PSEUDO_REGISTER
		  && (asm_noperands (max_needs[class] > 0
				     ? max_needs_insn[class]
				     : max_nongroups_insn[class])
		      < 0))
		for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		  if (potential_reload_regs[i] >= 0
		      && TEST_HARD_REG_BIT (reg_class_contents[class],
					    potential_reload_regs[i]))
		    break;

	      /* I should be the index in potential_reload_regs
		 of the new reload reg we have found.  */

	      if (i >= FIRST_PSEUDO_REGISTER)
		{
		  /* There are no possible registers left to spill.  */
		  spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
				 : max_nongroups_insn[class]);
		  failure = 1;
		  goto failed;
		}
	      else
		something_changed
		  |= new_spill_reg (i, class, max_needs, max_nongroups,
				    global, dumpfile);
	    }
	}
    }
  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */

  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
	mark_elimination (ep->from, ep->to);

  /* Insert code to save and restore call-clobbered hard regs
     around calls.  Tell what mode to use so that we will process
     those insns in reload_as_needed if we have to.  */

  if (caller_save_needed)
    save_call_clobbered_regs (num_eliminable ? QImode
			      : caller_save_spill_class != NO_REGS ? HImode
			      : VOIDmode);
  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
     If that insn didn't set the register (i.e., it copied the register to
     memory), just delete that insn instead of the equivalencing insn plus
     anything now dead.  If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there, and that is incorrect.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
	&& GET_CODE (reg_equiv_init[i]) != NOTE)
      {
	if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
	  delete_dead_insn (reg_equiv_init[i]);
	else
	  {
	    PUT_CODE (reg_equiv_init[i], NOTE);
	    NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
	    NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
	  }
      }
  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (something_needs_reloads || something_needs_elimination
      || (caller_save_needed && num_eliminable)
      || caller_save_spill_class != NO_REGS)
    reload_as_needed (first, global);

  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    for (i = 0; i < n_basic_blocks; i++)
      CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
			   HARD_FRAME_POINTER_REGNUM);

  /* Come here (with failure set nonzero) if we can't get enough spill regs
     and we decide not to abort about it.  */
 failed:

  reload_in_progress = 0;
  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;
      int in_struct = 0;

      if (reg_equiv_mem[i])
	{
	  addr = XEXP (reg_equiv_mem[i], 0);
	  in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
	}

      if (reg_equiv_address[i])
	addr = reg_equiv_address[i];

      if (addr)
	{
	  if (reg_renumber[i] < 0)
	    {
	      rtx reg = regno_reg_rtx[i];
	      XEXP (reg, 0) = addr;
	      REG_USERVAR_P (reg) = 0;
	      MEM_IN_STRUCT_P (reg) = in_struct;
	      /* We have no alias information about this newly created
		 MEM.  */
	      MEM_ALIAS_SET (reg) = 0;
	      PUT_CODE (reg, MEM);
	    }
	  else if (reg_equiv_mem[i])
	    XEXP (reg_equiv_mem[i], 0) = addr;
	}
    }
  /* Make a pass over all the insns and delete all USEs which we inserted
     only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
     is defined, also remove death notes for things that are no longer
     registers or no longer die in the insn (e.g., an input and output
     pseudo being tied).  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
#ifdef PRESERVE_DEATH_INFO_REGNO_P
	rtx note, next;
#endif

	if (GET_CODE (PATTERN (insn)) == USE
	    && find_reg_note (insn, REG_EQUAL, NULL_RTX))
	  {
	    PUT_CODE (insn, NOTE);
	    NOTE_SOURCE_FILE (insn) = 0;
	    NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	  }
#ifdef PRESERVE_DEATH_INFO_REGNO_P
	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& (GET_CODE (XEXP (note, 0)) != REG
		    || reg_set_p (XEXP (note, 0), PATTERN (insn))))
	      remove_note (insn, note);
	  }
#endif
      }
  /* If we are doing stack checking, give a warning if this function's
     frame size is larger than we expect.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
	  size += UNITS_PER_WORD;

      if (size > STACK_CHECK_MAX_FRAME_SIZE)
	warning ("frame size too large for reliable stack checking");
    }
  /* Indicate that we no longer have known memory locations or constants.  */
  reg_equiv_constant = 0;
  reg_equiv_memory_loc = 0;

  free (real_known_ptr);
  free (scratch_list);
  free (scratch_block);

  CLEAR_HARD_REG_SET (used_spill_regs);
  for (i = 0; i < n_spills; i++)
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);

  return failure;
}
/* Nonzero if, after spilling reg REGNO for non-groups,
   it will still be possible to find a group if we still need one.  */

static int
possible_group_p (regno, max_groups)
     int regno;
     int *max_groups;
{
  int i;
  int class = (int) NO_REGS;

  for (i = 0; i < (int) N_REG_CLASSES; i++)
    if (max_groups[i] > 0)
      {
	class = i;
	break;
      }

  if (class == (int) NO_REGS)
    return 1;

  /* Consider each pair of consecutive registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
    {
      /* Ignore pairs that include reg REGNO.  */
      if (i == regno || i + 1 == regno)
	continue;

      /* Ignore pairs that are outside the class that needs the group.
	 ??? Here we fail to handle the case where two different classes
	 independently need groups.  But this never happens with our
	 current machine descriptions.  */
      if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
	     && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
	continue;

      /* A pair of consecutive regs we can still spill does the trick.  */
      if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
	return 1;

      /* A pair of one already spilled and one we can spill does it
	 provided the one already spilled is not otherwise reserved.  */
      if (spill_reg_order[i] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && spill_reg_order[i + 1] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
	return 1;
      if (spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
	  && spill_reg_order[i] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
	return 1;
    }

  return 0;
}
/* Count any groups of CLASS that can be formed from the registers recently
   spilled.  */

static void
count_possible_groups (group_size, group_mode, max_groups, class)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
     int class;
{
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  if (group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.  */
  for (i = 0; i < n_spills; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
      SET_HARD_REG_BIT (new, spill_regs[i]);

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
      {
	for (j = 1; j < group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    max_groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		if (group_size[(int) *p] <= group_size[class])
		  max_groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < group_size[class]; j++)
	      SET_HARD_REG_BIT (counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.  */
	i += j - 1;
      }
}
/* ALLOCATE_MODE is a register mode that needs to be reloaded.  OTHER_MODE is
   another mode that needs to be reloaded for the same register class CLASS.
   If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
   ALLOCATE_MODE will never be smaller than OTHER_MODE.

   This code used to also fail if any reg in CLASS allows OTHER_MODE but not
   ALLOCATE_MODE.  This test is unnecessary, because we will never try to put
   something of mode ALLOCATE_MODE into an OTHER_MODE register.  Testing this
   causes unnecessary failures on machines requiring alignment of register
   groups when the two modes are different sizes, because the larger mode has
   more strict alignment rules than the smaller mode.  */

static int
modes_equiv_for_class_p (allocate_mode, other_mode, class)
     enum machine_mode allocate_mode, other_mode;
     enum reg_class class;
{
  register int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
	&& HARD_REGNO_MODE_OK (regno, allocate_mode)
	&& ! HARD_REGNO_MODE_OK (regno, other_mode))
      return 0;

  return 1;
}
/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.  */

static void
spill_failure (insn)
     rtx insn;
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "`asm' needs too many reloads");
  else
    fatal_insn ("Unable to find a register to spill.", insn);
}
/* Add a new register to the tables of available spill-registers
    (as well as spilling all pseudos allocated to the register).
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
    so that this register can count off against them.
    MAX_NONGROUPS is 0 if this register is part of a group.
   GLOBAL and DUMPFILE are the same as the args that `reload' got.  */

static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    {
      static char *reg_class_names[] = REG_CLASS_NAMES;
      fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
This may be due to a compiler bug or to impossible asm\n\
statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
    }

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.)

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  return val;
}
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */

static void
delete_dead_insn (insn)
     rtx insn;
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
    delete_dead_insn (prev);

  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
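      /* Illustrative example (hypothetical sizes): an SImode pseudo has
	 inherent_size == 4; if it is also referenced somewhere through a
	 paradoxical DImode subreg, reg_max_ref_width[i] is 8, so
	 total_size == 8 and the slot must provide 8 bytes even though the
	 pseudo's own mode only needs 4.  */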
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
				  inherent_size == total_size ? 0 : -1);
	  if (BYTES_BIG_ENDIAN)
	    /* Cancel the big-endian correction done in assign_stack_local.
	       Get the address of the beginning of the slot.
	       This is so we can do a big-endian correction unconditionally
	       below.  */
	    adjust = inherent_size - total_size;

	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;

	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  inherent_size == total_size ? 0 : -1);
	  stack_slot = x;

	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot = gen_rtx_MEM (mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
					  plus_constant (XEXP (x, 0), adjust));
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
			   plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
/* Mark the slots in regs_ever_live for the hard regs
   used by pseudo-reg number REGNO.  */

void
mark_home_live (regno)
     int regno;
{
  register int i, lim;

  i = reg_renumber[regno];
  if (i < 0)
    return;
  lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
  while (i < lim)
    regs_ever_live[i++] = 1;
}
/* Mark the registers used in SCRATCH as being live.  */

static void
mark_scratch_live (scratch)
     rtx scratch;
{
  register int i;
  int regno = REGNO (scratch);
  int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));

  for (i = regno; i < lim; i++)
    regs_ever_live[i] = 1;
}
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */
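/* Illustrative note (hypothetical values): if the fp-to-sp elimination is
   at offset 16 when a CODE_LABEL is first scanned, offsets_at[label][i]
   records 16; should another path later reach the same label with the
   offset at 24, the disagreement makes us clear can_eliminate for that
   pair rather than risk using a wrong offset after the label.  */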
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */

static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;

/* Scan X and replace any eliminable registers (such as fp) with a
   replacement (such as sp), plus an offset.

   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
   MEM, we are allowed to replace a sum of a register and the constant zero
   with the register, which we cannot do outside a MEM.  In addition, we need
   to record the fact that a register is referenced outside a MEM.

   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
   the REG is being modified.

   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
   That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   If we see a modification to a register we know about, take the
   appropriate action (see case SET, below).

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
   replacements done assuming all offsets are at their initial values.  If
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
   encounter, return the actual location so that find_reloads will do
   the proper thing.  */
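/* Illustrative example of the replacement (hypothetical offset): if the
   frame pointer is being eliminated in favor of the stack pointer and the
   current offset between them is 8, then a reference such as
       (mem:SI (plus:SI (reg fp) (const_int 4)))
   is rewritten below as
       (mem:SI (plus:SI (reg sp) (const_int 12))).  */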
rtx
eliminate_regs (x, mem_mode, insn)
     rtx x;
     enum machine_mode mem_mode;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new;
  int i, j;
  char *fmt;
  int copied = 0;

  switch (code)
    {
    case ADDRESSOF:
      /* This is only for the benefit of the debugging backends, which call
	 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
	 removed after CSE.  */
      new = eliminate_regs (XEXP (x, 0), 0, insn);
      if (GET_CODE (new) == MEM)
	return XEXP (new, 0);
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode
		    /* Refs inside notes don't count for this purpose.  */
		    && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
					|| GET_CODE (insn) == INSN_LIST)))
		  ep->ref_outside_mem = 1;

		return plus_constant (ep->to_rtx, ep->previous_offset);
	      }
	}
      else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
	       && (reg_equiv_address[regno] || num_not_at_initial_offset))
	{
	  /* In this case, find_reloads would attempt to either use an
	     incorrect address (if something is not at its initial offset)
	     or substitute a replaced address into an insn (which loses
	     if the offset is changed by some later action).  So we simply
	     return the replaced stack slot (assuming it is changed by
	     elimination) and ignore the fact that this is actually a
	     reference to the pseudo.  Ensure we make a copy of the
	     address in case it is shared.  */
	  new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, insn);
	  if (new != reg_equiv_memory_loc[regno])
	    {
	      if (insn != 0 && GET_CODE (insn) != EXPR_LIST
		  && GET_CODE (insn) != INSN_LIST)
		REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn))
		  = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
	      return copy_rtx (new);
	    }
	}
      return x;

    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (GET_CODE (XEXP (x, 0)) == REG
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		if (! mem_mode
		    /* Refs inside notes don't count for this purpose.  */
		    && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
					|| GET_CODE (insn) == INSN_LIST)))
		  ep->ref_outside_mem = 1;

		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
		  return ep->to_rtx;
		else
		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
				       plus_constant (XEXP (x, 1),
						      ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 We assume here this is part of an address (or a "load address" insn)
	 since an eliminable register is not likely to appear in any other
	 context.

	 If we have (plus (eliminable) (reg)), we want to produce
	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
	 normal add insn, (plus (replacement) (reg)) will be pushed as a
	 reload.  This is the desired action.  */
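      /* Purely illustrative (hypothetical registers and offset): with an
	 fp-to-sp elimination at offset 8,
	     (plus (reg fp) (reg r3))
	 becomes
	     (plus (plus (reg sp) (reg r3)) (const_int 8))
	 so that the constant ends up on the outermost PLUS.  */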
      {
	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
	rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equiv_constant != 0
		&& reg_equiv_constant[REGNO (new1)] != 0)
	      new1 = reg_equiv_constant[REGNO (new1)];
	    else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant[REGNO (new0)] != 0)
	      new0 = reg_equiv_constant[REGNO (new0)];

	    new = form_sum (new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new) != PLUS)
	      return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
	    else
	      return new;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.  This case is pathological.
	 We ignore the possibility of overflow here.  */
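      /* Illustrative only (hypothetical offset): with an fp-to-sp
	 elimination at offset 8,
	     (mult (reg fp) (const_int 4))
	 is rewritten as
	     (plus (mult (reg sp) (const_int 4)) (const_int 32))
	 since (fp * 4) == ((sp + 8) * 4) == (sp * 4) + 32.  */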
      if (GET_CODE (XEXP (x, 0)) == REG
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes don't count for this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST)))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* ... fall through ... */

    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
	rtx new1
	  = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;
    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
	  if (new != XEXP (x, 0))
	    x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
	}

      /* ... fall through ... */

    case INSN_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
	  if (new != XEXP (x, 1))
	    return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
	}
      return x;
    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else
	      ep->offset -= size;
	  }

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new);
      return x;
    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_WORD.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reloads
	 may do the replacement in certain circumstances.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equiv_memory_loc != 0
	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
	{
	  new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
				mem_mode, insn);

	  /* If we didn't change anything, we must retain the pseudo.  */
	  if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
	    new = SUBREG_REG (x);
	  else
	    {
	      /* In this case, we must show that the pseudo is used in this
		 insn so that delete_output_reload will do the right thing.  */
	      if (insn != 0 && GET_CODE (insn) != EXPR_LIST
		  && GET_CODE (insn) != INSN_LIST)
		REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
							  SUBREG_REG (x)),
					     insn))
		  = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);

	      /* Ensure NEW isn't shared in case we have to reload it.  */
	      new = copy_rtx (new);
	    }
	}
      else
	new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);

      if (new != XEXP (x, 0))
	{
	  int x_size = GET_MODE_SIZE (GET_MODE (x));
	  int new_size = GET_MODE_SIZE (GET_MODE (new));

	  if (GET_CODE (new) == MEM
	      && ((x_size < new_size
#ifdef WORD_REGISTER_OPERATIONS
		   /* On these machines, combine can create rtl of the form
		      (set (subreg:m1 (reg:m2 R) 0) ...)
		      where m1 < m2, and expects something interesting to
		      happen to the entire word.  Moreover, it will use the
		      (reg:m2 R) later, expecting all bits to be preserved.
		      So if the number of words is the same, preserve the
		      subreg so that push_reloads can see it.  */
		   && ! ((x_size - 1) / UNITS_PER_WORD
			 == (new_size - 1) / UNITS_PER_WORD)
#endif
		   )
		  || (x_size == new_size))
	      )
	    {
	      int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	      enum machine_mode mode = GET_MODE (x);

	      if (BYTES_BIG_ENDIAN)
		offset += (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (new)))
			   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	      PUT_MODE (new, mode);
	      XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
	      return new;
	    }
	  else
	    return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
	}

      return x;
    case USE:
      /* If using a register that is the source of an elimination we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new);
      return x;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new);
      return x;
    case ASM_OPERANDS:
      {
	rtx *temp_vec;

	/* Properly handle sharing input and constraint vectors.  */
	if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
	  {
	    /* When we come to a new vector not seen before,
	       scan all its elements; keep the old vector if none
	       of them changes; otherwise, make a copy.  */
	    old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
	    temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
	    for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	      temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
					    mem_mode, insn);

	    for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	      if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
		break;

	    if (i == ASM_OPERANDS_INPUT_LENGTH (x))
	      new_asm_operands_vec = old_asm_operands_vec;
	    else
	      new_asm_operands_vec
		= gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
	  }

	/* If we had to copy the vector, copy the entire ASM_OPERANDS.  */
	if (new_asm_operands_vec == old_asm_operands_vec)
	  return x;

	new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
				    ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
				    ASM_OPERANDS_OUTPUT_IDX (x),
				    new_asm_operands_vec,
				    ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
				    ASM_OPERANDS_SOURCE_FILE (x),
				    ASM_OPERANDS_SOURCE_LINE (x));
	new->volatil = x->volatil;
	return new;
      }
    case SET:
      /* Check for setting a register that we know about.  */
      if (GET_CODE (SET_DEST (x)) == REG)
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && GET_CODE (XEXP (src, 1)) == CONST_INT)
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }

	  /* Now check to see if we are assigning to a register that can be
	     eliminated.  If so, it must be as part of a PARALLEL, since we
	     will not have been called if this is a single SET.  So indicate
	     that we can no longer eliminate this reg.  */
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
	      ep->can_eliminate = 0;
	}

      /* Now avoid the loop below in this common case.  */
      {
	rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
	rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);

	/* If SET_DEST changed from a REG to a MEM and INSN is an insn,
	   write a CLOBBER insn.  */
	if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
	    && insn != 0 && GET_CODE (insn) != EXPR_LIST
	    && GET_CODE (insn) != INSN_LIST)
	  emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);

	if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
	  return gen_rtx_SET (VOIDmode, new0, new1);
      }

      return x;
    case MEM:
      /* This is only for the benefit of the debugging backends, which call
	 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
	 removed after CSE.  */
      if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
	return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);

      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */
      new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
      if (new != XEXP (x, 0))
	{
	  new = gen_rtx_MEM (GET_MODE (x), new);
	  new->volatil = x->volatil;
	  new->unchanging = x->unchanging;
	  new->in_struct = x->in_struct;
	  return new;
	}
      else
	return x;

    default:
      break;
    }
  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new = eliminate_regs (XEXP (x, i), mem_mode, insn);
	  if (new != XEXP (x, i) && ! copied)
	    {
	      rtx new_x = rtx_alloc (code);
	      bcopy ((char *) x, (char *) new_x,
		     (sizeof (*new_x) - sizeof (new_x->fld)
		      + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
	      x = new_x;
	      copied = 1;
	    }
	  XEXP (x, i) = new;
	}
      else if (*fmt == 'E')
	{
	  int copied_vec = 0;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
	      if (new != XVECEXP (x, i, j) && ! copied_vec)
		{
		  rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
					      XVEC (x, i)->elem);
		  if (! copied)
		    {
		      rtx new_x = rtx_alloc (code);
		      bcopy ((char *) x, (char *) new_x,
			     (sizeof (*new_x) - sizeof (new_x->fld)
			      + (sizeof (new_x->fld[0])
				 * GET_RTX_LENGTH (code))));
		      x = new_x;
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new;
	    }
	}
    }

  return x;
}
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */
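/* Illustrative only: even an insn that never mentions fp, such as
       (set (reg sp) (plus (reg sp) (const_int -16))),
   matters here, because it changes the fp-to-sp offset that later
   fp eliminations in this basic block must use.  */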
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  struct elim_table *ep;

  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		int offset = 0, ok = 0;
		rtx prev_insn, prev_set;

		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;
		else if ((prev_insn = prev_nonnote_insn (insn)) != 0
			 && (prev_set = single_set (prev_insn)) != 0
			 && rtx_equal_p (SET_DEST (prev_set), src))
		  {
		    src = SET_SRC (prev_set);
		    if (src == ep->to_rtx)
		      offset = 0, ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 0)) == CONST_INT
			     && XEXP (src, 1) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 0)), ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 1)) == CONST_INT
			     && XEXP (src, 0) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 1)), ok = 1;
		  }

		if (ok)
		  {
		    rtx src
		      = plus_constant (ep->to_rtx, offset - ep->offset);

		    /* First see if this insn remains valid when we
		       make the change.  If not, keep the INSN_CODE
		       the same and let reload fit it up.  */
		    validate_change (insn, &SET_SRC (old_set), src, 1);
		    validate_change (insn, &SET_DEST (old_set),
				     ep->to_rtx, 1);
		    if (! apply_change_group ())
		      {
			SET_SRC (old_set) = src;
			SET_DEST (old_set) = ep->to_rtx;
		      }
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	  }
      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */
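      /* Illustrative only (hypothetical offset): if the ap-to-sp
	 elimination is currently at offset 16, then
	     (set (reg r0) (plus (reg ap) (const_int -16)))
	 collapses to
	     (set (reg r0) (reg sp))
	 and the insn is re-coded as a simple move.  */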
      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx_SET (VOIDmode,
						SET_DEST (old_set),
						ep->to_rtx);
		  INSN_CODE (insn) = -1;
		}

	      break;
	    }
    }
3496 old_asm_operands_vec
= 0;
3498 /* Replace the body of this insn with a substituted form. If we changed
3499 something, return non-zero.
3501 If we are replacing a body that was a (set X (plus Y Z)), try to
3502 re-recognize the insn. We do this in case we had a simple addition
3503 but now can do this as a load-address. This saves an insn in this
3506 new_body
= eliminate_regs (old_body
, 0, replace
? insn
: NULL_RTX
);
3507 if (new_body
!= old_body
)
3509 /* If we aren't replacing things permanently and we changed something,
3510 make another copy to ensure that all the RTL is new. Otherwise
3511 things can go wrong if find_reload swaps commutative operands
3512 and one is inside RTL that has been copied while the other is not. */
3514 /* Don't copy an asm_operands because (1) there's no need and (2)
3515 copy_rtx can't do it properly when there are multiple outputs. */
3516 if (! replace
&& asm_noperands (old_body
) < 0)
3517 new_body
= copy_rtx (new_body
);
3519 /* If we had a move insn but now we don't, rerecognize it. This will
3520 cause spurious re-recognition if the old move had a PARALLEL since
3521 the new one still will, but we can't call single_set without
3522 having put NEW_BODY into the insn and the re-recognition won't
3523 hurt in this rare case. */
3525 && ((GET_CODE (SET_SRC (old_set
)) == REG
3526 && (GET_CODE (new_body
) != SET
3527 || GET_CODE (SET_SRC (new_body
)) != REG
))
3528 /* If this was a load from or store to memory, compare
3529 the MEM in recog_operand to the one in the insn. If they
3530 are not equal, then rerecognize the insn. */
3532 && ((GET_CODE (SET_SRC (old_set
)) == MEM
3533 && SET_SRC (old_set
) != recog_operand
[1])
3534 || (GET_CODE (SET_DEST (old_set
)) == MEM
3535 && SET_DEST (old_set
) != recog_operand
[0])))
3536 /* If this was an add insn before, rerecognize. */
3537 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3539 if (! validate_change (insn
, &PATTERN (insn
), new_body
, 0))
3540 /* If recognition fails, store the new body anyway.
3541 It's normal to have recognition failures here
3542 due to bizarre memory addresses; reloading will fix them. */
3543 PATTERN (insn
) = new_body
;
3546 PATTERN (insn
) = new_body
;
3551 /* Loop through all elimination pairs. See if any have changed and
3552 recalculate the number not at initial offset.
3554 Compute the maximum offset (minimum offset if the stack does not
3555 grow downward) for each elimination pair.
3557 We also detect a cases where register elimination cannot be done,
3558 namely, if a register would be both changed and referenced outside a MEM
3559 in the resulting insn since such an insn is often undefined and, even if
3560 not, we cannot know what meaning will be given to it. Note that it is
3561 valid to have a register used in an address in an insn that changes it
3562 (presumably with a pre- or post-increment or decrement).
3564 If anything changes, return nonzero. */
3566 num_not_at_initial_offset
= 0;
3567 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3569 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3570 ep
->can_eliminate
= 0;
3572 ep
->ref_outside_mem
= 0;
3574 if (ep
->previous_offset
!= ep
->offset
)
3577 ep
->previous_offset
= ep
->offset
;
3578 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3579 num_not_at_initial_offset
++;
3581 #ifdef STACK_GROWS_DOWNWARD
3582 ep
->max_offset
= MAX (ep
->max_offset
, ep
->offset
);
3584 ep
->max_offset
= MIN (ep
->max_offset
, ep
->offset
);
3589 /* If we changed something, perform elimination in REG_NOTES. This is
3590 needed even when REPLACE is zero because a REG_DEAD note might refer
3591 to a register that we eliminate and could cause a different number
3592 of spill registers to be needed in the final reload pass than in
3594 if (val
&& REG_NOTES (insn
) != 0)
3595 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
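
/* Illustrative sketch (not part of the compiler): the two offset
   computations used above, shown on plain integers.  The function names
   below are invented for the example; only the arithmetic mirrors the code
   above.  It is kept inside "#if 0" so it does not affect this file and can
   be compiled separately for experimentation.  */
#if 0
#include <assert.h>

/* Mirror of "src = plus_constant (ep->to_rtx, offset - ep->offset)":
   the constant that must be added to the replacement register.  */
static long
replacement_constant (long insn_offset, long elimination_offset)
{
  return insn_offset - elimination_offset;
}

/* Mirror of "ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1))":
   nonzero when (set (reg) (plus (reg from) (const_int c))) can simply
   become (set (reg) (reg to)).  */
static int
plus_becomes_plain_reg (long elimination_offset, long insn_constant)
{
  return elimination_offset == -insn_constant;
}

int
main (void)
{
  /* If the elimination offset is 16 and the insn adds 24, the rewritten
     insn must add 24 - 16 = 8 to the replacement register.  */
  assert (replacement_constant (24, 16) == 8);
  /* If the insn adds exactly the negative of the offset, no addition
     remains and the PLUS collapses to a plain register copy.  */
  assert (plus_becomes_plain_reg (16, -16));
  assert (! plus_becomes_plain_reg (16, -8));
  return 0;
}
#endif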
/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
   replacement we currently believe is valid, mark it as not eliminable if X
   modifies DEST in any way other than by adding a constant integer to it.

   If DEST is the frame pointer, we do nothing because we assume that
   all assignments to the hard frame pointer are nonlocal gotos and are being
   done at a time when they are valid and do not disturb anything else.
   Some machines want to eliminate a fake argument pointer with either the
   frame or stack pointer.  Assignments to the hard frame pointer must not
   prevent this elimination.

   Called via note_stores from reload before starting its passes to scan
   the insns of the function.  */

static void
mark_not_eliminable (dest, x)
     rtx dest;
     rtx x;
{
  register int i;

  /* A SUBREG of a hard register here is just changing its mode.  We should
     not see a SUBREG of an eliminable hard register, but check just in
     case.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (dest == hard_frame_pointer_rtx)
    return;

  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
	&& (GET_CODE (x) != SET
	    || GET_CODE (SET_SRC (x)) != PLUS
	    || XEXP (SET_SRC (x), 0) != dest
	    || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
      {
	reg_eliminate[i].can_eliminate_previous
	  = reg_eliminate[i].can_eliminate = 0;
      }
}
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  if (cant_eliminate)
    regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && REG_BASIC_BLOCK (i) >= 0
	    && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
	  {
	    register enum reg_class *p;

	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }

  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i]
	  && regno >= REGNO (scratch_list[i])
	  && regno < (REGNO (scratch_list[i])
		      + HARD_REGNO_NREGS (REGNO (scratch_list[i]),
					  GET_MODE (scratch_list[i]))))
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      register enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }

	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	}
    }

  return something_changed;
}
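
/* Illustrative sketch (not part of the compiler): the overlap test used in
   the loop above, "reg_renumber[i] <= regno < reg_renumber[i] + nregs",
   written out as a standalone helper.  The function name is invented for
   the example; the block is inside "#if 0" so it does not affect this
   file.  */
#if 0
#include <assert.h>

/* Nonzero if hard register REGNO is one of the NREGS consecutive hard
   registers starting at FIRST that a pseudo occupies.  */
static int
pseudo_overlaps_hard_reg (int first, int nregs, int regno)
{
  return first >= 0 && first <= regno && regno < first + nregs;
}

int
main (void)
{
  /* A two-register pseudo living in regs 4 and 5 overlaps a spill of
     reg 5, but not a spill of reg 6.  */
  assert (pseudo_overlaps_hard_reg (4, 2, 5));
  assert (! pseudo_overlaps_hard_reg (4, 2, 6));
  /* A pseudo that lives on the stack (reg_renumber == -1) never overlaps.  */
  assert (! pseudo_overlaps_hard_reg (-1, 1, 0));
  return 0;
}
#endif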
/* Find all paradoxical subregs within X and update reg_max_ref_width.
   Also mark any hard registers used to store user variables as
   forbidden from being used for spill registers.  */

static void
scan_paradoxical_subregs (x)
     register rtx x;
{
  register int i;
  register char *fmt;
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
      if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
	  && REG_USERVAR_P (x))
	SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	reg_max_ref_width[REGNO (SUBREG_REG (x))]
	  = GET_MODE_SIZE (GET_MODE (x));
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	scan_paradoxical_subregs (XEXP (x, i));
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    scan_paradoxical_subregs (XVECEXP (x, i, j));
	}
    }
}
static int
hard_reg_use_compare (p1p, p2p)
     const GENERIC_PTR p1p;
     const GENERIC_PTR p2p;
{
  struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *) p1p,
			 *p2 = (struct hard_reg_n_uses *) p2p;
  int tem = p1->uses - p2->uses;
  if (tem != 0)
    return tem;
  /* If regs are equally good, sort by regno,
     so that the results of qsort leave nothing to chance.  */
  return p1->regno - p2->regno;
}
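
/* Illustrative sketch (not part of the compiler): a standalone qsort
   comparator in the same style as hard_reg_use_compare, with the use count
   as the primary key and the register number as a tie-breaker so the result
   is deterministic.  The struct and names are invented for the example and
   the block is inside "#if 0".  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct use_count { int uses, regno; };

static int
use_count_compare (const void *p1, const void *p2)
{
  const struct use_count *a = (const struct use_count *) p1;
  const struct use_count *b = (const struct use_count *) p2;
  int tem = a->uses - b->uses;
  if (tem != 0)
    return tem;
  /* Equal use counts: fall back to regno so the output does not depend
     on the qsort implementation.  */
  return a->regno - b->regno;
}

int
main (void)
{
  struct use_count v[] = { {3, 2}, {0, 7}, {0, 1}, {5, 0} };
  int i;

  qsort (v, 4, sizeof v[0], use_count_compare);
  for (i = 0; i < 4; i++)
    printf ("regno %d uses %d\n", v[i].regno, v[i].uses);
  /* Prints regno 1 and regno 7 (both unused) before regno 2 and regno 0.  */
  return 0;
}
#endif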
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

static void
order_regs_for_reload (global)
     int global;
{
  register int i;
  register int o = 0;
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    {
	      /* If allocated by local-alloc, show more uses since
		 we're not going to be able to reallocate it, but
		 we might if allocated by global alloc.  */
	      if (global && reg_allocno[i] < 0)
		hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;

	      hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
	    }
	}
      large += REG_N_REFS (i);
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
	  if (! SMALL_REGISTER_CLASSES)
	    /* ??? We are doing this here because of the potential
	       that bad code may be generated if a register explicitly
	       used in an insn was used as a spill register for that
	       insn.  But not using these as spill registers may lose
	       on some machine.  We'll have to see how this works out.  */
	    SET_HARD_REG_BIT (bad_spill_regs, i);
	}
    }

  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
/* Used in reload_as_needed to sort the spilled regs.  */

static int
compare_spill_regs (r1p, r2p)
     const GENERIC_PTR r1p;
     const GENERIC_PTR r2p;
{
  short r1 = *(short *) r1p, r2 = *(short *) r2p;
  return r1 - r2;
}
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
  for (i = 0; i < n_spills; i++)
    spill_reg_order[spill_regs[i]] = i;

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block + 1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;
	  rtx oldpat = PATTERN (insn);

	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (SMALL_REGISTER_CLASSES && after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
		   && GET_CODE (PATTERN (insn)) != USE)
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      if (SMALL_REGISTER_CLASSES)
		merge_assigned_reloads (insn);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insns that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
	    && INSN_CLOBBERS_REGNO_P (insn, i))
	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
#endif

      insn = next;
    }
}
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.  */

static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored ATTRIBUTE_UNUSED;
{
  register int regno;
  int nr;
  int offset = 0;

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      register int i;

      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	/* But don't do this if the reg actually serves as an output
	   reload reg in the current instruction.  */
	if (n_reloads == 0
	    || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];

/* Comparison function for qsort to decide which of two reloads
   should be handled first.  *P1 and *P2 are the reload numbers.  */

static int
reload_reg_class_lower (r1p, r2p)
     const GENERIC_PTR r1p;
     const GENERIC_PTR r2p;
{
  register int r1 = *(short *) r1p, r2 = *(short *) r2p;
  register int t;

  /* Consider required reloads before optional ones.  */
  t = reload_optional[r1] - reload_optional[r2];
  if (t != 0)
    return t;

  /* Count all solitary classes before non-solitary ones.  */
  t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
       - (reg_class_size[(int) reload_reg_class[r1]] == 1));
  if (t != 0)
    return t;

  /* Aside from solitaires, consider all multi-reg groups first.  */
  t = reload_nregs[r2] - reload_nregs[r1];
  if (t != 0)
    return t;

  /* Consider reloads in order of increasing reg-class number.  */
  t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
  if (t != 0)
    return t;

  /* If reloads are equally urgent, sort by reload number,
     so that the results of qsort leave nothing to chance.  */
  return r1 - r2;
}

/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are
   actually used.  */

static void
mark_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
	{
	case RELOAD_OTHER:
	  SET_HARD_REG_BIT (reload_reg_used, i);
	  break;

	case RELOAD_FOR_INPUT_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
	  break;

	case RELOAD_FOR_INPADDR_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTADDR_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OPERAND_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
	  break;

	case RELOAD_FOR_OPADDR_ADDR:
	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
	  break;

	case RELOAD_FOR_OTHER_ADDRESS:
	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
	  break;

	case RELOAD_FOR_INPUT:
	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT:
	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
	  break;

	case RELOAD_FOR_INSN:
	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
	  break;
	}

      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
    }
}
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
	{
	case RELOAD_OTHER:
	  CLEAR_HARD_REG_BIT (reload_reg_used, i);
	  break;

	case RELOAD_FOR_INPUT_ADDRESS:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
	  break;

	case RELOAD_FOR_INPADDR_ADDRESS:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT_ADDRESS:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
	  break;

	case RELOAD_FOR_OUTADDR_ADDRESS:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
	  break;

	case RELOAD_FOR_OPERAND_ADDRESS:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
	  break;

	case RELOAD_FOR_OPADDR_ADDR:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
	  break;

	case RELOAD_FOR_OTHER_ADDRESS:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
	  break;

	case RELOAD_FOR_INPUT:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
	  break;

	case RELOAD_FOR_OUTPUT:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
	  break;

	case RELOAD_FOR_INSN:
	  CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
	  break;
	}
    }
}
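
/* Illustrative sketch (not part of the compiler): the mark/clear pattern
   above on a plain bitmask instead of a HARD_REG_SET, covering the
   HARD_REGNO_NREGS consecutive registers a value occupies.  All names are
   invented for the example, which assumes at most 32 hard registers and is
   kept inside "#if 0".  */
#if 0
#include <assert.h>

typedef unsigned int reg_set;

static void
mark_regs (reg_set *set, int regno, int nregs)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    *set |= (reg_set) 1 << i;
}

static void
clear_regs (reg_set *set, int regno, int nregs)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    *set &= ~((reg_set) 1 << i);
}

static int
test_reg (reg_set set, int regno)
{
  return (set >> regno) & 1;
}

int
main (void)
{
  reg_set used = 0;

  /* A two-register value reloaded into regs 4 and 5.  */
  mark_regs (&used, 4, 2);
  assert (test_reg (used, 4) && test_reg (used, 5) && ! test_reg (used, 6));

  clear_regs (&used, 4, 2);
  assert (used == 0);
  return 0;
}
#endif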
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  abort ();
}
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;
      /* ... fall through ... */
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;
      /* ... fall through ... */
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  abort ();
}
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = -1;

      /* ... fall through ... */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only things to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;
    }

  abort ();
}
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.

   This function uses the same algorithm as reload_reg_free_p above.  */

static int
reloads_conflict (r1, r2)
     int r1, r2;
{
  enum reload_type r1_type = reload_when_needed[r1];
  enum reload_type r2_type = reload_when_needed[r2];
  int r1_opnum = reload_opnum[r1];
  int r2_opnum = reload_opnum[r2];

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      abort ();
    }
}
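
/* Illustrative sketch (not part of the compiler): one of the rules encoded
   above, checked on plain integers.  An input-address reload for operand
   R1_OPNUM conflicts with an input-address reload for the same operand and
   with an input reload for any earlier operand, whose reload register is
   still live when this address is computed.  The enum and names are
   invented for the example; the block is inside "#if 0".  */
#if 0
#include <assert.h>

enum toy_type { TOY_INPUT, TOY_INPUT_ADDRESS };

static int
toy_input_address_conflict (int r1_opnum, enum toy_type r2_type, int r2_opnum)
{
  return ((r2_type == TOY_INPUT_ADDRESS && r1_opnum == r2_opnum)
	  || (r2_type == TOY_INPUT && r2_opnum < r1_opnum));
}

int
main (void)
{
  /* Operand 2's address reload clashes with operand 1's input reload,
     because operand 1's reload register stays live until the insn runs...  */
  assert (toy_input_address_conflict (2, TOY_INPUT, 1));
  /* ... but not with operand 3's input, which is loaded later.  */
  assert (! toy_input_address_conflict (2, TOY_INPUT, 3));
  assert (toy_input_address_conflict (2, TOY_INPUT_ADDRESS, 2));
  return 0;
}
#endif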
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
int reload_spill_index[MAX_RELOADS];
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   may be used to load VALUE into it.

   Other read-only reloads with the same value do not conflict
   unless OUT is non-zero and these other reloads have to live while
   output reloads live.

   RELOADNUM is the number of the reload we want to load this value for;
   a reload does not conflict with itself.

   The caller has to make sure that there is no conflict with the return
   register.  */

static int
reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
     int regno;
     int opnum;
     enum reload_type type;
     rtx value, out;
     int reloadnum;
{
  int time1;
  int i;

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      time1 = 0;
      break;
    /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
       RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
       respectively, to the time values for these, we get distinct time
       values.  To get distinct time values for each operand, we have to
       multiply opnum by at least three.  We round that up to four because
       multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 1;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till just before the
	 instruction is executed.  */
      time1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3;
      break;
    /* opnum * 4 + 3 < opnum * 4 + 4
       <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5;
    }

  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = reload_reg_rtx[i];
      if (reg && GET_CODE (reg) == REG
	  && ((unsigned) regno - true_regnum (reg)
	      <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned) 1)
	  && i != reloadnum)
	{
	  if (out
	      && reload_when_needed[i] != RELOAD_FOR_INPUT
	      && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
	      && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
	    return 0;

	  if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
	      || reload_out[i])
	    {
	      int time2;

	      switch (reload_when_needed[i])
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  time2 = reload_opnum[i] * 4 + 1;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  time2 = reload_opnum[i] * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = reload_opnum[i] * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4;
		  break;
		/* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with
		   the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		   value.  */
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + reload_opnum[i];
		  break;
		default:
		  if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4;
		      break;
		    }
		  return 0;
		}

	      if (time1 >= time2)
		return 0;
	    }
	}
    }

  return 1;
}
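
/* Illustrative sketch (not part of the compiler): the 'time' bucket
   arithmetic described in the comment above, as a standalone function.
   MAX_OPERANDS and the enum are invented stand-ins for MAX_RECOG_OPERANDS
   and enum reload_type; only the opnum * 4 + {1,2,3} spacing mirrors the
   code above.  The block is inside "#if 0".  */
#if 0
#include <assert.h>

#define MAX_OPERANDS 30

enum kind { OTHER_ADDRESS, INPADDR_ADDRESS, INPUT_ADDRESS, INPUT,
	    OUTPUT_ADDRESS, CONSERVATIVE };

static int
death_time (enum kind k, int opnum)
{
  switch (k)
    {
    case OTHER_ADDRESS:   return 0;
    case INPADDR_ADDRESS: return opnum * 4 + 1;
    case INPUT_ADDRESS:   return opnum * 4 + 2;
    case INPUT:           return (MAX_OPERANDS - 1) * 4 + 3;
    case OUTPUT_ADDRESS:  return MAX_OPERANDS * 4 + opnum;
    default:              return MAX_OPERANDS * 5;
    }
}

int
main (void)
{
  /* Within one operand the reload kinds get distinct, increasing times,
     and operand 1's buckets all come after operand 0's.  */
  assert (death_time (INPADDR_ADDRESS, 0) < death_time (INPUT_ADDRESS, 0));
  assert (death_time (INPUT_ADDRESS, 0) < death_time (INPADDR_ADDRESS, 1));
  /* Every input-side time precedes the output-address times.  */
  assert (death_time (INPUT, 0) < death_time (OUTPUT_ADDRESS, 0));
  return 0;
}
#endif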
5128 /* Find a spill register to use as a reload register for reload R.
5129 LAST_RELOAD is non-zero if this is the last reload for the insn being
5132 Set reload_reg_rtx[R] to the register allocated.
5134 If NOERROR is nonzero, we return 1 if successful,
5135 or 0 if we couldn't find a spill reg and we didn't change anything. */
5138 allocate_reload_reg (r
, insn
, last_reload
, noerror
)
5150 /* If we put this reload ahead, thinking it is a group,
5151 then insist on finding a group. Otherwise we can grab a
5152 reg that some other reload needs.
5153 (That can happen when we have a 68000 DATA_OR_FP_REG
5154 which is a group of data regs or one fp reg.)
5155 We need not be so restrictive if there are no more reloads
5158 ??? Really it would be nicer to have smarter handling
5159 for that kind of reg class, where a problem like this is normal.
5160 Perhaps those classes should be avoided for reloading
5161 by use of more alternatives. */
5163 int force_group
= reload_nregs
[r
] > 1 && ! last_reload
;
5165 /* If we want a single register and haven't yet found one,
5166 take any reg in the right class and not in use.
5167 If we want a consecutive group, here is where we look for it.
5169 We use two passes so we can first look for reload regs to
5170 reuse, which are already in use for other reloads in this insn,
5171 and only then use additional registers.
5172 I think that maximizing reuse is needed to make sure we don't
5173 run out of reload regs. Suppose we have three reloads, and
5174 reloads A and B can share regs. These need two regs.
5175 Suppose A and B are given different regs.
5176 That leaves none for C. */
5177 for (pass
= 0; pass
< 2; pass
++)
5179 /* I is the index in spill_regs.
5180 We advance it round-robin between insns to use all spill regs
5181 equally, so that inherited reloads have a chance
5182 of leapfrogging each other. Don't do this, however, when we have
5183 group needs and failure would be fatal; if we only have a relatively
5184 small number of spill registers, and more than one of them has
5185 group needs, then by starting in the middle, we may end up
5186 allocating the first one in such a way that we are not left with
5187 sufficient groups to handle the rest. */
5189 if (noerror
|| ! force_group
)
5194 for (count
= 0; count
< n_spills
; count
++)
5196 int class = (int) reload_reg_class
[r
];
5198 i
= (i
+ 1) % n_spills
;
5200 if ((reload_reg_free_p (spill_regs
[i
], reload_opnum
[r
],
5201 reload_when_needed
[r
])
5203 /* We check reload_reg_used to make sure we
5204 don't clobber the return register. */
5205 && ! TEST_HARD_REG_BIT (reload_reg_used
, spill_regs
[i
])
5206 && reload_reg_free_for_value_p (spill_regs
[i
],
5208 reload_when_needed
[r
],
5211 && TEST_HARD_REG_BIT (reg_class_contents
[class], spill_regs
[i
])
5212 && HARD_REGNO_MODE_OK (spill_regs
[i
], reload_mode
[r
])
5213 /* Look first for regs to share, then for unshared. But
5214 don't share regs used for inherited reloads; they are
5215 the ones we want to preserve. */
5217 || (TEST_HARD_REG_BIT (reload_reg_used_at_all
,
5219 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit
,
5222 int nr
= HARD_REGNO_NREGS (spill_regs
[i
], reload_mode
[r
]);
5223 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5224 (on 68000) got us two FP regs. If NR is 1,
5225 we would reject both of them. */
5227 nr
= CLASS_MAX_NREGS (reload_reg_class
[r
], reload_mode
[r
]);
5228 /* If we need only one reg, we have already won. */
5231 /* But reject a single reg if we demand a group. */
5236 /* Otherwise check that as many consecutive regs as we need
5238 Also, don't use for a group registers that are
5239 needed for nongroups. */
5240 if (! TEST_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]))
5243 regno
= spill_regs
[i
] + nr
- 1;
5244 if (!(TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
5245 && spill_reg_order
[regno
] >= 0
5246 && reload_reg_free_p (regno
, reload_opnum
[r
],
5247 reload_when_needed
[r
])
5248 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
5258 /* If we found something on pass 1, omit pass 2. */
5259 if (count
< n_spills
)
5263 /* We should have found a spill register by now. */
5264 if (count
== n_spills
)
  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx_REG (reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = spill_regs[i];
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    abort ();

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
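
/* Illustrative sketch (not part of the reload pass, and not GCC API):
   the round-robin walk the loop above performs -- start just past the
   register handed out last time and wrap with `(i + 1) % n_spills' --
   shown on a toy pool.  All names here (pick_round_robin, pool_busy,
   last_pick) are hypothetical.  */
#if 0
#include <stdio.h>

#define N_SPILLS 4

static int last_pick = -1;            /* plays the role of last_spill_reg */
static int pool_busy[N_SPILLS];       /* stands in for the free/class tests */

/* Return the index chosen, or -1 if every candidate was rejected.  */
static int
pick_round_robin (void)
{
  int i = last_pick;
  int count;

  for (count = 0; count < N_SPILLS; count++)
    {
      i = (i + 1) % N_SPILLS;
      if (! pool_busy[i])
	{
	  last_pick = i;
	  return i;
	}
    }
  return -1;
}

int
main (void)
{
  pool_busy[1] = 1;
  printf ("%d\n", pick_round_robin ());   /* prints 0 */
  printf ("%d\n", pick_round_robin ());   /* prints 2, skipping busy reg 1 */
  return 0;
}
#endif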
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     rtx avoid_return_reg;
{
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;

  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (SMALL_REGISTER_CLASSES && avoid_return_reg)
    {
      int do_avoid = 0;
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  for (j = 0; j < n_reloads; j++)
	    if (!reload_optional[j] && reload_reg_rtx[j] == 0
		&& (reload_in[j] != 0 || reload_out[j] != 0
		    || reload_secondary_p[j])
		&& TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
	      do_avoid = 1;
      if (!do_avoid)
	avoid_return_reg = 0;
    }
#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = SMALL_REGISTER_CLASSES ? (avoid_return_reg != 0) : 0;
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (SMALL_REGISTER_CLASSES && avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  SET_HARD_REG_BIT (reload_reg_used, r);
    }

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */
  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      reload_mode[j]
	= (reload_inmode[j] == VOIDmode
	   || (GET_MODE_SIZE (reload_outmode[j])
	       > GET_MODE_SIZE (reload_inmode[j])))
	  ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class = reg_class_superunion[(int) reload_reg_class[j]][(int) group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
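
/* Illustrative sketch (hypothetical names, not GCC API): the qsort idiom
   used above -- sort an array of reload indices with a comparator that
   ranks narrower register classes first.  The real reload_reg_class_lower
   also weighs groups and optional reloads; this toy version only looks at
   a class number.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

#define N 5

static int toy_class[N] = { 3, 1, 2, 1, 0 };   /* smaller = tighter class */
static short order[N];

static int
toy_class_lower (const void *a, const void *b)
{
  short ra = *(const short *) a, rb = *(const short *) b;
  int diff = toy_class[ra] - toy_class[rb];

  /* Break ties by original index so the result is deterministic.  */
  return diff != 0 ? diff : ra - rb;
}

int
main (void)
{
  int j;

  for (j = 0; j < N; j++)
    order[j] = j;
  qsort (order, N, sizeof (short), toy_class_lower);
  for (j = 0; j < N; j++)
    printf ("%d ", order[j]);                  /* prints 4 1 3 2 0 */
  printf ("\n");
  return 0;
}
#endif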
  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
	 sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
	 (char *) save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
	 sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
		     reload_reg_used_in_op_addr_reload);
  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
		     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
		     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
			 reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
			 reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
			 reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
			 reload_reg_used_in_inpaddr_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
			 reload_reg_used_in_output_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
			 reload_reg_used_in_outaddr_addr[i]);
    }
  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
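
      /* Illustrative sketch (hypothetical names, not GCC API): the
	 save / attempt / restore-and-retry pattern that this inheritance
	 loop follows -- snapshot the allocation state, try with
	 inheritance, and if any mandatory reload is left without a
	 register, restore the snapshot and try once more with
	 inheritance turned off.  */
#if 0
#include <stdio.h>
#include <string.h>

#define MAX_RELOADS 8

static int assignment[MAX_RELOADS];

/* Pretend allocator: scribbles on the state and succeeds only when
   `inheritance' is off, to force one retry.  */
static int
try_allocate (int inheritance)
{
  int j;

  for (j = 0; j < MAX_RELOADS; j++)
    assignment[j] = j + 1;
  return inheritance == 0;
}

int
main (void)
{
  int save_assignment[MAX_RELOADS];
  int inheritance;

  memcpy (save_assignment, assignment, sizeof assignment);

  for (inheritance = 1; inheritance >= 0; inheritance--)
    {
      if (try_allocate (inheritance))
	break;
      /* The attempt failed: undo everything it did before retrying.  */
      memcpy (assignment, save_assignment, sizeof assignment);
    }

  printf ("succeeded with inheritance = %d\n", inheritance);
  return 0;
}
#endif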
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0
	      && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a
	     register be allocated here.  In `emit_reload_insns' we suppress
	     one of the loads in the case described above.  */
	  if (inheritance)
	    {
	      register int regno = -1;
	      enum machine_mode mode;

	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		{
		  regno = REGNO (reload_in[r]);
		  mode = GET_MODE (reload_in[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		{
		  regno = REGNO (reload_in_reg[r]);
		  mode = GET_MODE (reload_in_reg[r]);
		}
	      else if (GET_CODE (reload_in[r]) == MEM)
		{
		  rtx prev = prev_nonnote_insn (insn), note;

		  if (prev && GET_CODE (prev) == INSN
		      && GET_CODE (PATTERN (prev)) == USE
		      && GET_CODE (XEXP (PATTERN (prev), 0)) == REG
		      && (REGNO (XEXP (PATTERN (prev), 0))
			  >= FIRST_PSEUDO_REGISTER)
		      && (note = find_reg_note (prev, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (note, 0)) == MEM)
		    {
		      rtx addr = XEXP (XEXP (note, 0), 0);
		      int size_diff
			= (GET_MODE_SIZE (GET_MODE (addr))
			   - GET_MODE_SIZE (GET_MODE (reload_in[r])));
		      if (size_diff >= 0
			  && rtx_equal_p ((BYTES_BIG_ENDIAN
					   ? plus_constant (addr, size_diff)
					   : addr),
					  XEXP (reload_in[r], 0)))
			{
			  regno = REGNO (XEXP (PATTERN (prev), 0));
			  mode = GET_MODE (reload_in[r]);
			}
		    }
		}
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif
	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  i = REGNO (reg_last_reload_reg[regno]);

		  if (reg_reloaded_contents[i] == regno
		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
		      && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
			  >= GET_MODE_SIZE (mode))
		      && HARD_REGNO_MODE_OK (i, reload_mode[r])
		      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					    i)
		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  i))
		      && ((reload_reg_free_p (i, reload_opnum[r],
					      reload_when_needed[r])
			   && reload_reg_free_before_p (i, reload_opnum[r],
							reload_when_needed[r]))
			  || reload_reg_free_for_value_p (i, reload_opnum[r],
							  reload_when_needed[r],
							  reload_in[r],
							  reload_out[r], r)))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (i, reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[i + k] != regno
			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
			  break;

		      if (k == nr)
			{
			  int i1;

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      /* Don't use it if we'd clobber a pseudo reg.  */
			      || (spill_reg_order[i] < 0
				  && reload_out[r]
				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (reload_mode[r])
				  > GET_MODE_SIZE (mode)))
			    reload_override_in[r] = reg_last_reload_reg[regno];
			  else
			    {
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (i,
						      reload_opnum[r],
						      reload_when_needed[r],
						      reload_mode[r]);
			      reload_reg_rtx[r] = reg_last_reload_reg[regno];
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  i + k);
			    }
			}
		    }
		}
	    }
	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		      equiv = gen_rtx_REG (reload_mode[r], regno);
		    }
		  else
		    abort ();
		}
	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((spill_reg_order[regno] >= 0
		       && ! (reload_reg_free_before_p (regno, reload_opnum[r],
						       reload_when_needed[r])
			     || reload_reg_free_for_value_p (regno,
							     reload_opnum[r],
							     reload_when_needed[r],
							     reload_in[r],
							     reload_out[r], r)))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* JRV: If the equiv register we have found is
		 explicitly clobbered in the current insn, mark but
		 don't use, as above.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  reload_override_in[r] = equiv;
		  equiv = 0;
		}
	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
		  int k;
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;

		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		    {
		      i = spill_reg_order[regno + k];
		      if (i >= 0)
			{
			  mark_reload_reg_in_use (regno, reload_opnum[r],
						  reload_when_needed[r],
						  reload_mode[r]);
			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					    regno + k);
			}
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;
#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
#endif
	}
      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;
      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
			     save_reload_reg_used_in_inpaddr_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
			     save_reload_reg_used_in_outaddr_addr[i]);
	}
    }
  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! (reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					  reload_opnum[r],
					  reload_when_needed[r])
		|| reload_reg_free_for_value_p (true_regnum (reload_reg_rtx[r]),
						reload_opnum[r],
						reload_when_needed[r],
						reload_in[r],
						reload_out[r], r)))
	reload_inherited[r] = 0;
      /* If we can inherit a RELOAD_FOR_INPUT, then we do not need its related
	 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads.
	 ??? This could be extended to other reload types, but these are
	 more tricky to handle:
	 RELOAD_FOR_OTHER_ADDRESS reloads might have been merged, so we
	 can't eliminate them without a check that *all* references are
	 now unused due to inheritance.
	 While RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_OUTADDR_ADDRESS are
	 not merged, we can't be sure that we have eliminated the use of
	 that particular reload if we have seen just one
	 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS being inherited,
	 since there might be multiple of the latter two reloads for a single
	 operand.
	 RELOAD_FOR_OPADDR_ADDR reloads for different operands are not
	 merged, but might share the same register by courtesy of
	 reload_reg_free_for_value_p.  reload_reg_used_in_op_addr_reload
	 does not differentiate by opnum, thus calling clear_reload_reg_in_use
	 for one of these reloads would mark the register as free even though
	 another RELOAD_FOR_OPADDR_ADDR reload might still use it.  */
      else if (reload_inherited[r] && reload_when_needed[r] == RELOAD_FOR_INPUT)
	{
	  for (i = 0; i < n_reloads; i++)
	    {
	      if ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
		   || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
		  && reload_opnum[i] == reload_opnum[r]
		  && reload_in[i] && reload_reg_rtx[i])
		{
		  int regno = true_regnum (reload_reg_rtx[i]);

		  if (spill_reg_order[regno] >= 0)
		    clear_reload_reg_in_use (regno, reload_opnum[i],
					     reload_when_needed[i],
					     reload_mode[i]);
		  reload_reg_rtx[i] = 0;
		  reload_spill_index[i] = -1;
		  remove_replacements (i);
		}
	    }
	}
      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_opnum[r],
					     reload_when_needed[r]))
	    reload_override_in[r] = 0;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];
  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
      }
  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    reg_has_output_reload[nregno + nr] = 1;

	  if (i >= 0)
	    {
	      nr = HARD_REGNO_NREGS (i, reload_mode[r]);
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
	    }

	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	    abort ();
	}
    }
}
/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
   reloads of the same item for fear that we might not have enough reload
   registers.  However, normally they will get the same reload register
   and hence actually need not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */
static void
merge_assigned_reloads (insn)
     rtx insn;
{
  int i, j;

  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      int conflicting_input = 0;
      int max_input_address_opnum = -1;
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;

      if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
	  || reload_out[i] != 0 || reload_reg_rtx[i] == 0
	  || reg_set_p (reload_reg_rtx[i], insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	{
	  if (i == j || reload_reg_rtx[j] == 0
	      || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
					    reload_reg_rtx[i]))
	    continue;

	  if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
	      && reload_opnum[j] > max_input_address_opnum)
	    max_input_address_opnum = reload_opnum[j];

	  /* If the reload regs aren't exactly the same (e.g., different modes)
	     or if the values are different, we can't merge this reload.
	     But if it is an input reload, we might still merge
	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */

	  if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
	      || reload_out[j] != 0 || reload_in[j] == 0
	      || ! rtx_equal_p (reload_in[i], reload_in[j]))
	    {
	      if (reload_when_needed[j] != RELOAD_FOR_INPUT
		  || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
		       || reload_opnum[i] > reload_opnum[j])
		      && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
		break;

	      conflicting_input = 1;
	      if (min_conflicting_input_opnum > reload_opnum[j])
		min_conflicting_input_opnum = reload_opnum[j];
	    }
	}
      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

      if (j == n_reloads
	  && max_input_address_opnum <= min_conflicting_input_opnum)
	{
	  for (j = 0; j < n_reloads; j++)
	    if (i != j && reload_reg_rtx[j] != 0
		&& rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
		&& (! conflicting_input
		    || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
		    || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
	      {
		reload_when_needed[i] = RELOAD_OTHER;
		reload_in[j] = 0;
		reload_spill_index[j] = -1;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that load
	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
	     this test is equivalent to looking for reloads for this operand
	     number.  */

	  if (reload_when_needed[i] == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (reload_in[j] != 0
		  && reload_when_needed[i] != RELOAD_OTHER
		  && reg_overlap_mentioned_for_reload_p (reload_in[j],
							 reload_in[i]))
		reload_when_needed[j]
		  = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
		      || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
		     ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
	}
    }
}
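
/* Illustrative sketch (hypothetical names, not GCC API): the duplicate
   detection that merge_assigned_reloads performs, reduced to a toy table.
   Two entries that load the same value into the same register are
   collapsed into one, which is what lets the pass keep a single survivor
   without needing more spill registers.  */
#if 0
#include <stdio.h>

#define N_TOY_RELOADS 4

struct toy_reload { int value, reg, merged; };

static struct toy_reload rl[N_TOY_RELOADS] =
  { { 10, 3, 0 }, { 20, 4, 0 }, { 10, 3, 0 }, { 10, 3, 0 } };

int
main (void)
{
  int i, j;

  for (i = 0; i < N_TOY_RELOADS; i++)
    {
      if (rl[i].merged)
	continue;
      for (j = i + 1; j < N_TOY_RELOADS; j++)
	if (! rl[j].merged
	    && rl[j].value == rl[i].value
	    && rl[j].reg == rl[i].reg)
	  rl[j].merged = 1;   /* one load of this value is enough */
    }

  for (i = 0; i < N_TOY_RELOADS; i++)
    printf ("reload %d: %s\n", i, rl[i].merged ? "merged away" : "kept");
  return 0;
}
#endif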
/* Output insns to reload values in and out of the chosen reload regs.  */

static void
emit_reload_insns (insn)
     rtx insn;
{
  register int j;
  rtx input_reload_insns[MAX_RECOG_OPERANDS];
  rtx other_input_address_reload_insns = 0;
  rtx other_input_reload_insns = 0;
  rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx operand_reload_insns = 0;
  rtx other_operand_reload_insns = 0;
  rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
  rtx following_insn = NEXT_INSN (insn);
  rtx before_insn = insn;
  /* Values to be put in spill_reg_store are put here first.  */
  rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
  HARD_REG_SET reg_reloaded_died;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      register rtx old;
      rtx oldequiv_reg = 0;
      rtx this_reload_insn = 0;
      int expect_occurrences = 1;

      if (reload_spill_index[j] >= 0)
	new_spill_reg_store[reload_spill_index[j]] = 0;

      old = reload_in[j];
      if (old != 0 && ! reload_inherited[j]
	  && ! rtx_equal_p (reload_reg_rtx[j], old)
	  && reload_reg_rtx[j] != 0)
	{
	  register rtx reloadreg = reload_reg_rtx[j];
	  rtx oldequiv = 0;
	  int special = 0;
	  enum machine_mode mode;
	  rtx *where;

	  /* Determine the mode to reload in.
	     This is very tricky because we have three to choose from.
	     There is the mode the insn operand wants (reload_inmode[J]).
	     There is the mode of the reload register RELOADREG.
	     There is the intrinsic mode of the operand, which we could find
	     by stripping some SUBREGs.
	     It turns out that RELOADREG's mode is irrelevant:
	     we can change that arbitrarily.

	     Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	     then the reload reg may not support QImode moves, so use SImode.
	     If foo is in memory due to spilling a pseudo reg, this is safe,
	     because the QImode value is in the least significant part of a
	     slot big enough for a SImode.  If foo is some other sort of
	     memory reference, then it is impossible to reload this case,
	     so previous passes had better make sure this never happens.

	     Then consider a one-word union which has SImode and one of its
	     members is a float, being fetched as (SUBREG:SF union:SI).
	     We must fetch that as SFmode because we could be loading into
	     a float-only register.  In this case OLD's mode is correct.

	     Consider an immediate integer: it has VOIDmode.  Here we need
	     to get a mode from something else.

	     In some cases, there is a fourth mode, the operand's
	     containing mode.  If the insn specifies a containing mode for
	     this operand, it overrides all others.

	     I am not sure whether the algorithm here is always right,
	     but it does the right things in those cases.  */

	  mode = GET_MODE (old);
	  if (mode == VOIDmode)
	    mode = reload_inmode[j];
#ifdef SECONDARY_INPUT_RELOAD_CLASS
	  /* If we need a secondary register for this operation, see if
	     the value is already in a register in that class.  Don't
	     do this if the secondary register will be used as a scratch
	     register.  */

	  if (reload_secondary_in_reload[j] >= 0
	      && reload_secondary_in_icode[j] == CODE_FOR_nothing
	      && optimize)
	    oldequiv
	      = find_equiv_reg (old, insn,
				reload_reg_class[reload_secondary_in_reload[j]],
				-1, NULL_PTR, 0, mode);
#endif

	  /* If reloading from memory, see if there is a register
	     that already holds the same value.  If so, reload from there.
	     We can pass 0 as the reload_reg_p argument because
	     any other reload has either already been emitted,
	     in which case find_equiv_reg will see the reload-insn,
	     or has yet to be emitted, in which case it doesn't matter
	     because we will use this equiv reg right away.  */

	  if (oldequiv == 0 && optimize
	      && (GET_CODE (old) == MEM
		  || (GET_CODE (old) == REG
		      && REGNO (old) >= FIRST_PSEUDO_REGISTER
		      && reg_renumber[REGNO (old)] < 0)))
	    oldequiv = find_equiv_reg (old, insn, ALL_REGS,
				       -1, NULL_PTR, 0, mode);
	  if (oldequiv)
	    {
	      int regno = true_regnum (oldequiv);

	      /* If OLDEQUIV is a spill register, don't use it for this
		 if any other reload needs it at an earlier stage of this insn
		 or at this stage.  */
	      if (spill_reg_order[regno] >= 0
		  && (! reload_reg_free_p (regno, reload_opnum[j],
					   reload_when_needed[j])
		      || ! reload_reg_free_before_p (regno, reload_opnum[j],
						     reload_when_needed[j])))
		oldequiv = 0;

	      /* If OLDEQUIV is not a spill register,
		 don't use it if any other reload wants it.  */
	      if (spill_reg_order[regno] < 0)
		{
		  int k;
		  for (k = 0; k < n_reloads; k++)
		    if (reload_reg_rtx[k] != 0 && k != j
			&& reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
							       oldequiv))
		      {
			oldequiv = 0;
			break;
		      }
		}

	      /* If it is no cheaper to copy from OLDEQUIV into the
		 reload register than it would be to move from memory,
		 don't use it.  Likewise, if we need a secondary register
		 or memory.  */

	      if (oldequiv != 0
		  && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
		       && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
					       reload_reg_class[j])
			   >= MEMORY_MOVE_COST (mode, REGNO_REG_CLASS (regno),
						1)))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
		      || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
							mode, oldequiv)
			  != NO_REGS)
#endif
#ifdef SECONDARY_MEMORY_NEEDED
		      || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
						  REGNO_REG_CLASS (regno),
						  mode)
#endif
		      ))
		oldequiv = 0;
	    }

	  if (oldequiv == 0)
	    oldequiv = old;
	  else if (GET_CODE (oldequiv) == REG)
	    oldequiv_reg = oldequiv;
	  else if (GET_CODE (oldequiv) == SUBREG)
	    oldequiv_reg = SUBREG_REG (oldequiv);
	  /* If we are reloading from a register that was recently stored in
	     with an output-reload, see if we can prove there was
	     actually no need to store the old value in it.  */

	  if (optimize && GET_CODE (oldequiv) == REG
	      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
	      && spill_reg_store[REGNO (oldequiv)]
	      && GET_CODE (old) == REG && dead_or_set_p (insn, old)
	      /* This is unsafe if operand occurs more than once in current
		 insn.  Perhaps some occurrences weren't reloaded.  */
	      && count_occurrences (PATTERN (insn), old) == 1)
	    delete_output_reload (insn, j, spill_reg_store[REGNO (oldequiv)]);

	  /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
	     then load RELOADREG from OLDEQUIV.  Note that we cannot use
	     gen_lowpart_common since it can do the wrong thing when
	     RELOADREG has a multi-word mode.  Note that RELOADREG
	     must always be a REG here.  */

	  if (GET_MODE (reloadreg) != mode)
	    reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
	  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
	    oldequiv = SUBREG_REG (oldequiv);
	  if (GET_MODE (oldequiv) != VOIDmode
	      && mode != GET_MODE (oldequiv))
	    oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
	  /* Switch to the right place to emit the reload insns.  */
	  switch (reload_when_needed[j])
	    {
	    case RELOAD_OTHER:
	      where = &other_input_reload_insns;
	      break;
	    case RELOAD_FOR_INPUT:
	      where = &input_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_INPUT_ADDRESS:
	      where = &input_address_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_INPADDR_ADDRESS:
	      where = &inpaddr_address_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_OUTPUT_ADDRESS:
	      where = &output_address_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_OUTADDR_ADDRESS:
	      where = &outaddr_address_reload_insns[reload_opnum[j]];
	      break;
	    case RELOAD_FOR_OPERAND_ADDRESS:
	      where = &operand_reload_insns;
	      break;
	    case RELOAD_FOR_OPADDR_ADDR:
	      where = &other_operand_reload_insns;
	      break;
	    case RELOAD_FOR_OTHER_ADDRESS:
	      where = &other_input_address_reload_insns;
	      break;
	    default:
	      abort ();
	    }

	  push_to_sequence (*where);
	  /* Auto-increment addresses must be reloaded in a special way.  */
	  if (GET_CODE (oldequiv) == POST_INC
	      || GET_CODE (oldequiv) == POST_DEC
	      || GET_CODE (oldequiv) == PRE_INC
	      || GET_CODE (oldequiv) == PRE_DEC)
	    {
	      /* We are not going to bother supporting the case where an
		 incremented register can't be copied directly from
		 OLDEQUIV since this seems highly unlikely.  */
	      if (reload_secondary_in_reload[j] >= 0)
		abort ();
	      /* Prevent normal processing of this reload.  */
	      special = 1;
	      /* Output a special code sequence for this case.  */
	      inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
	    }

	  /* If we are reloading a pseudo-register that was set by the previous
	     insn, see if we can get rid of that pseudo-register entirely
	     by redirecting the previous insn into our reload register.  */

	  else if (optimize && GET_CODE (old) == REG
		   && REGNO (old) >= FIRST_PSEUDO_REGISTER
		   && dead_or_set_p (insn, old)
		   /* This is unsafe if some other reload
		      uses the same reg first.  */
		   && reload_reg_free_before_p (REGNO (reloadreg),
						reload_opnum[j],
						reload_when_needed[j]))
	    {
	      rtx temp = PREV_INSN (insn);
	      while (temp && GET_CODE (temp) == NOTE)
		temp = PREV_INSN (temp);
	      if (temp
		  && GET_CODE (temp) == INSN
		  && GET_CODE (PATTERN (temp)) == SET
		  && SET_DEST (PATTERN (temp)) == old
		  /* Make sure we can access insn_operand_constraint.  */
		  && asm_noperands (PATTERN (temp)) < 0
		  /* This is unsafe if prev insn rejects our reload reg.  */
		  && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
					       reloadreg)
		  /* This is unsafe if operand occurs more than once in current
		     insn.  Perhaps some occurrences aren't reloaded.  */
		  && count_occurrences (PATTERN (insn), old) == 1
		  /* Don't risk splitting a matching pair of operands.  */
		  && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
		{
		  /* Store into the reload register instead of the pseudo.  */
		  SET_DEST (PATTERN (temp)) = reloadreg;
		  /* If these are the only uses of the pseudo reg,
		     pretend for GDB it lives in the reload reg we used.  */
		  if (REG_N_DEATHS (REGNO (old)) == 1
		      && REG_N_SETS (REGNO (old)) == 1)
		    {
		      reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
		      alter_reg (REGNO (old), -1);
		    }
		  special = 1;
		}
	    }
	  /* We can't do that, so output an insn to load RELOADREG.  */

	  if (! special)
	    {
#ifdef SECONDARY_INPUT_RELOAD_CLASS
	      rtx second_reload_reg = 0;
	      enum insn_code icode;

	      /* If we have a secondary reload, pick up the secondary register
		 and icode, if any.  If OLDEQUIV and OLD are different or
		 if this is an in-out reload, recompute whether or not we
		 still need a secondary register and what the icode should
		 be.  If we still need a secondary register and the class or
		 icode is different, go back to reloading from OLD if using
		 OLDEQUIV means that we got the wrong type of register.  We
		 cannot have different class or icode due to an in-out reload
		 because we don't make such reloads when both the input and
		 output need secondary reload registers.  */

	      if (reload_secondary_in_reload[j] >= 0)
		{
		  int secondary_reload = reload_secondary_in_reload[j];
		  rtx real_oldequiv = oldequiv;
		  rtx real_old = old;

		  /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
		     and similarly for OLD.
		     See comments in get_secondary_reload in reload.c.  */
		  if (GET_CODE (oldequiv) == REG
		      && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
		      && reg_equiv_mem[REGNO (oldequiv)] != 0)
		    real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];

		  if (GET_CODE (old) == REG
		      && REGNO (old) >= FIRST_PSEUDO_REGISTER
		      && reg_equiv_mem[REGNO (old)] != 0)
		    real_old = reg_equiv_mem[REGNO (old)];

		  second_reload_reg = reload_reg_rtx[secondary_reload];
		  icode = reload_secondary_in_icode[j];

		  if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
		      || (reload_in[j] != 0 && reload_out[j] != 0))
		    {
		      enum reg_class new_class
			= SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
							mode, real_oldequiv);

		      if (new_class == NO_REGS)
			second_reload_reg = 0;
		      else
			{
			  enum insn_code new_icode;
			  enum machine_mode new_mode;

			  if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
						   REGNO (second_reload_reg)))
			    oldequiv = old, real_oldequiv = real_old;
			  else
			    {
			      new_icode = reload_in_optab[(int) mode];
			      if (new_icode != CODE_FOR_nothing
				  && ((insn_operand_predicate[(int) new_icode][0]
				       && ! ((*insn_operand_predicate[(int) new_icode][0])
					     (reloadreg, mode)))
				      || (insn_operand_predicate[(int) new_icode][1]
					  && ! ((*insn_operand_predicate[(int) new_icode][1])
						(real_oldequiv, mode)))))
				new_icode = CODE_FOR_nothing;

			      if (new_icode == CODE_FOR_nothing)
				new_mode = mode;
			      else
				new_mode = insn_operand_mode[(int) new_icode][2];

			      if (GET_MODE (second_reload_reg) != new_mode)
				{
				  if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
							   new_mode))
				    oldequiv = old, real_oldequiv = real_old;
				  else
				    second_reload_reg
				      = gen_rtx_REG (new_mode,
						     REGNO (second_reload_reg));
				}
			    }
			}
		    }
		  /* If we still need a secondary reload register, check
		     to see if it is being used as a scratch or intermediate
		     register and generate code appropriately.  If we need
		     a scratch register, use REAL_OLDEQUIV since the form of
		     the insn may depend on the actual address if it is
		     a MEM.  */

		  if (second_reload_reg)
		    {
		      if (icode != CODE_FOR_nothing)
			{
			  emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
						      second_reload_reg));
			  special = 1;
			}
		      else
			{
			  /* See if we need a scratch register to load the
			     intermediate register (a tertiary reload).  */
			  enum insn_code tertiary_icode
			    = reload_secondary_in_icode[secondary_reload];

			  if (tertiary_icode != CODE_FOR_nothing)
			    {
			      rtx third_reload_reg
				= reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];

			      emit_insn ((GEN_FCN (tertiary_icode)
					  (second_reload_reg, real_oldequiv,
					   third_reload_reg)));
			    }
			  else
			    gen_reload (second_reload_reg, oldequiv,
					reload_opnum[j],
					reload_when_needed[j]);

			  oldequiv = second_reload_reg;
			}
		    }
		}
#endif

	      if (! special && ! rtx_equal_p (reloadreg, oldequiv))
		gen_reload (reloadreg, oldequiv, reload_opnum[j],
			    reload_when_needed[j]);
#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
	      /* We may have to make a REG_DEAD note for the secondary reload
		 register in the insns we just made.  Find the last insn that
		 mentioned the register.  */
	      if (! special && second_reload_reg
		  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
		{
		  rtx prev;

		  for (prev = get_last_insn (); prev;
		       prev = PREV_INSN (prev))
		    if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
			&& reg_overlap_mentioned_for_reload_p (second_reload_reg,
							       PATTERN (prev)))
		      {
			REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
							      second_reload_reg,
							      REG_NOTES (prev));
			break;
		      }
		}
#endif
	    }

	  this_reload_insn = get_last_insn ();
	  /* End this sequence.  */
	  *where = get_insns ();
	  end_sequence ();
	}
      /* When inheriting a wider reload, we have a MEM in reload_in[j],
	 e.g. inheriting a SImode output reload for
	 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
      if (optimize && reload_inherited[j] && reload_in[j]
	  && GET_CODE (reload_in[j]) == MEM
	  && reload_spill_index[j] >= 0
	  && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
	{
	  expect_occurrences
	    = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
	  reload_in[j]
	    = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
	}
      /* Add a note saying the input reload reg
	 dies in this insn, if anyone cares.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
      if (old != 0
	  && reload_reg_rtx[j] != old
	  && reload_reg_rtx[j] != 0
	  && reload_out[j] == 0
	  && ! reload_inherited[j]
	  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
	{
	  register rtx reloadreg = reload_reg_rtx[j];

#if 0
	  /* We can't abort here because we need to support this for sched.c.
	     It's not terrible to miss a REG_DEAD note, but we should try
	     to figure out how to do this correctly.  */
	  /* The code below is incorrect for address-only reloads.  */
	  if (reload_when_needed[j] != RELOAD_OTHER
	      && reload_when_needed[j] != RELOAD_FOR_INPUT)
	    abort ();
#endif

	  /* Add a death note to this insn, for an input reload.  */

	  if ((reload_when_needed[j] == RELOAD_OTHER
	       || reload_when_needed[j] == RELOAD_FOR_INPUT)
	      && ! dead_or_set_p (insn, reloadreg))
	    REG_NOTES (insn)
	      = gen_rtx_EXPR_LIST (REG_DEAD,
				   reloadreg, REG_NOTES (insn));
	}
      /* When we inherit a reload, the last marked death of the reload reg
	 may no longer really be a death.  */
      if (reload_reg_rtx[j] != 0
	  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
	  && reload_inherited[j])
	{
	  /* Handle inheriting an output reload.
	     Remove the death note from the output reload insn.  */
	  if (reload_spill_index[j] >= 0
	      && GET_CODE (reload_in[j]) == REG
	      && spill_reg_store[reload_spill_index[j]] != 0
	      && find_regno_note (spill_reg_store[reload_spill_index[j]],
				  REG_DEAD, REGNO (reload_reg_rtx[j])))
	    remove_death (REGNO (reload_reg_rtx[j]),
			  spill_reg_store[reload_spill_index[j]]);
	  /* Likewise for input reloads that were inherited.  */
	  else if (reload_spill_index[j] >= 0
		   && GET_CODE (reload_in[j]) == REG
		   && spill_reg_store[reload_spill_index[j]] == 0
		   && reload_inheritance_insn[j] != 0
		   && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
				       REGNO (reload_reg_rtx[j])))
	    remove_death (REGNO (reload_reg_rtx[j]),
			  reload_inheritance_insn[j]);
	  else
	    {
	      rtx prev;

	      /* We got this register from find_equiv_reg.
		 Search back for its last death note and get rid of it.
		 But don't search back too far.
		 Don't go past a place where this reg is set,
		 since a death note before that remains valid.  */
	      for (prev = PREV_INSN (insn);
		   prev && GET_CODE (prev) != CODE_LABEL;
		   prev = PREV_INSN (prev))
		if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
		    && dead_or_set_p (prev, reload_reg_rtx[j]))
		  {
		    if (find_regno_note (prev, REG_DEAD,
					 REGNO (reload_reg_rtx[j])))
		      remove_death (REGNO (reload_reg_rtx[j]), prev);
		    break;
		  }
	    }
	}

      /* We might have used find_equiv_reg above to choose an alternate
	 place from which to reload.  If so, and it died, we need to remove
	 that death and move it to one of the insns we just made.  */

      if (oldequiv_reg != 0
	  && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
	{
	  rtx prev, prev1;

	  for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
	       prev = PREV_INSN (prev))
	    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
		&& dead_or_set_p (prev, oldequiv_reg))
	      {
		if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
		  {
		    for (prev1 = this_reload_insn;
			 prev1; prev1 = PREV_INSN (prev1))
		      if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
			  && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
								 PATTERN (prev1)))
			{
			  REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
								 oldequiv_reg,
								 REG_NOTES (prev1));
			  break;
			}
		    remove_death (REGNO (oldequiv_reg), prev);
		  }
		break;
	      }
	}
#endif
      /* If we are reloading a register that was recently stored in with an
	 output-reload, see if we can prove there was
	 actually no need to store the old value in it.  */

      if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
	  && reload_in[j] != 0
	  && GET_CODE (reload_in[j]) == REG
#if 0
	  /* There doesn't seem to be any reason to restrict this to pseudos
	     and doing so loses in the case where we are copying from a
	     register of the wrong class.  */
	  && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
#endif
	  && spill_reg_store[reload_spill_index[j]] != 0
	  /* This is unsafe if some other reload uses the same reg first.  */
	  && reload_reg_free_before_p (reload_spill_index[j],
				       reload_opnum[j], reload_when_needed[j])
	  && dead_or_set_p (insn, reload_in[j])
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences weren't reloaded.  */
	  && (count_occurrences (PATTERN (insn), reload_in[j])
	      == expect_occurrences))
	delete_output_reload (insn, j,
			      spill_reg_store[reload_spill_index[j]]);
      /* Input-reloading is done.  Now do output-reloading,
	 storing the value from the reload-register after the main insn
	 if reload_out[j] is nonzero.

	 ??? At some point we need to support handling output reloads of
	 JUMP_INSNs or insns that set cc0.  */
      old = reload_out[j];
      if (old != 0
	  && reload_reg_rtx[j] != old
	  && reload_reg_rtx[j] != 0)
	{
	  register rtx reloadreg = reload_reg_rtx[j];
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
	  register rtx second_reloadreg = 0;
#endif
	  rtx note, p;
	  enum machine_mode mode;
	  int special = 0;

	  /* An output operand that dies right away does need a reload,
	     but need not be copied from it.  Show the new location in the
	     REG_UNUSED note.  */
	  if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
	      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
	    {
	      XEXP (note, 0) = reload_reg_rtx[j];
	      continue;
	    }
	  /* Likewise for a SUBREG of an operand that dies.  */
	  else if (GET_CODE (old) == SUBREG
		   && GET_CODE (SUBREG_REG (old)) == REG
		   && 0 != (note = find_reg_note (insn, REG_UNUSED,
						  SUBREG_REG (old))))
	    {
	      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
						   reload_reg_rtx[j]);
	      continue;
	    }
	  else if (GET_CODE (old) == SCRATCH)
	    /* If we aren't optimizing, there won't be a REG_UNUSED note,
	       but we don't want to make an output reload.  */
	    continue;

#if 0
	  /* Strip off of OLD any size-increasing SUBREGs such as
	     (SUBREG:SI foo:QI 0).  */

	  while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
		 && (GET_MODE_SIZE (GET_MODE (old))
		     > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
	    old = SUBREG_REG (old);
#endif
	  /* If INSN is a JUMP_INSN, we can't support output reloads yet.  */
	  if (GET_CODE (insn) == JUMP_INSN)
	    abort ();

	  if (reload_when_needed[j] == RELOAD_OTHER)
	    start_sequence ();
	  else
	    push_to_sequence (output_reload_insns[reload_opnum[j]]);

	  /* Determine the mode to reload in.
	     See comments above (for input reloading).  */

	  mode = GET_MODE (old);
	  if (mode == VOIDmode)
	    {
	      /* VOIDmode should never happen for an output.  */
	      if (asm_noperands (PATTERN (insn)) < 0)
		/* It's the compiler's fault.  */
		fatal_insn ("VOIDmode on an output", insn);
	      error_for_asm (insn, "output operand is constant in `asm'");
	      /* Prevent crash--use something we know is valid.  */
	      mode = word_mode;
	      old = gen_rtx_REG (mode, REGNO (reloadreg));
	    }

	  if (GET_MODE (reloadreg) != mode)
	    reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS

	  /* If we need two reload regs, set RELOADREG to the intermediate
	     one, since it will be stored into OLD.  We might need a secondary
	     register only for an input reload, so check again here.  */

	  if (reload_secondary_out_reload[j] >= 0)
	    {
	      rtx real_old = old;

	      if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
		  && reg_equiv_mem[REGNO (old)] != 0)
		real_old = reg_equiv_mem[REGNO (old)];

	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
						  mode, real_old)
		   != NO_REGS))
		{
		  second_reloadreg = reloadreg;
		  reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];

		  /* See if RELOADREG is to be used as a scratch register
		     or as an intermediate register.  */
		  if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
		    {
		      emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
				  (real_old, second_reloadreg, reloadreg)));
		      special = 1;
		    }
		  else
		    {
		      /* See if we need both a scratch and intermediate reload
			 register.  */

		      int secondary_reload = reload_secondary_out_reload[j];
		      enum insn_code tertiary_icode
			= reload_secondary_out_icode[secondary_reload];

		      if (GET_MODE (reloadreg) != mode)
			reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));

		      if (tertiary_icode != CODE_FOR_nothing)
			{
			  rtx third_reloadreg
			    = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
			  rtx tem;

			  /* Copy primary reload reg to secondary reload reg,
			     then secondary reload reg to OLD using our insn.
			     (Note that the two have been swapped above.)  */

			  /* If REAL_OLD is a paradoxical SUBREG, remove it
			     and try to put the opposite SUBREG on
			     RELOADREG.  */
			  if (GET_CODE (real_old) == SUBREG
			      && (GET_MODE_SIZE (GET_MODE (real_old))
				  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
			      && 0 != (tem = gen_lowpart_common
				       (GET_MODE (SUBREG_REG (real_old)),
					reloadreg)))
			    real_old = SUBREG_REG (real_old), reloadreg = tem;

			  gen_reload (reloadreg, second_reloadreg,
				      reload_opnum[j], reload_when_needed[j]);
			  emit_insn ((GEN_FCN (tertiary_icode)
				      (real_old, reloadreg, third_reloadreg)));
			  special = 1;
			}

		      else
			/* Copy between the reload regs here and then to
			   OUT later.  */
			gen_reload (reloadreg, second_reloadreg,
				    reload_opnum[j], reload_when_needed[j]);
		    }
		}
	    }
#endif
	  /* Output the last reload insn.  */
	  if (! special)
	    {
	      rtx set;

	      /* Don't output the last reload if OLD is not the dest of
		 INSN and is in the src and is clobbered by INSN.  */
	      if (! flag_expensive_optimizations
		  || GET_CODE (old) != REG
		  || !(set = single_set (insn))
		  || rtx_equal_p (old, SET_DEST (set))
		  || !reg_mentioned_p (old, SET_SRC (set))
		  || !regno_clobbered_p (REGNO (old), insn))
		gen_reload (old, reloadreg, reload_opnum[j],
			    reload_when_needed[j]);
	    }
#ifdef PRESERVE_DEATH_INFO_REGNO_P
	  /* If final will look at death notes for this reg,
	     put one on the last output-reload insn to use it.  Similarly
	     for any secondary register.  */
	  if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
	    for (p = get_last_insn (); p; p = PREV_INSN (p))
	      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
		  && reg_overlap_mentioned_for_reload_p (reloadreg,
							 PATTERN (p)))
		REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
						   reloadreg, REG_NOTES (p));

#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
	  if (! special && second_reloadreg
	      && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
	    for (p = get_last_insn (); p; p = PREV_INSN (p))
	      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
		  && reg_overlap_mentioned_for_reload_p (second_reloadreg,
							 PATTERN (p)))
		REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
						   second_reloadreg,
						   REG_NOTES (p));
#endif
#endif /* PRESERVE_DEATH_INFO_REGNO_P */
	  /* Look at all insns we emitted, just to be safe.  */
	  for (p = get_insns (); p; p = NEXT_INSN (p))
	    if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
	      {
		rtx pat = PATTERN (p);

		/* If this output reload doesn't come from a spill reg,
		   clear any memory of reloaded copies of the pseudo reg.
		   If this output reload comes from a spill reg,
		   reg_has_output_reload will make this do nothing.  */
		note_stores (pat, forget_old_reloads_1);

		if (reg_mentioned_p (reload_reg_rtx[j], pat))
		  {
		    if (reload_spill_index[j] < 0
			&& GET_CODE (pat) == SET
			&& SET_SRC (pat) == reload_reg_rtx[j])
		      {
			int src = REGNO (SET_SRC (pat));

			reload_spill_index[j] = src;
			SET_HARD_REG_BIT (reg_is_output_reload, src);
			if (find_regno_note (insn, REG_DEAD, src))
			  SET_HARD_REG_BIT (reg_reloaded_died, src);
		      }
		    if (reload_spill_index[j] >= 0)
		      new_spill_reg_store[reload_spill_index[j]] = p;
		  }
	      }

	  if (reload_when_needed[j] == RELOAD_OTHER)
	    {
	      emit_insns (other_output_reload_insns[reload_opnum[j]]);
	      other_output_reload_insns[reload_opnum[j]] = get_insns ();
	    }
	  else
	    output_reload_insns[reload_opnum[j]] = get_insns ();

	  end_sequence ();
	}
    }
  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */
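  /* (Editor's illustration, not from the original source: for a
     hypothetical insn whose operand 1 is an input living in memory and
     whose operand 0 is an output, the stream built below ends up as

	<RELOAD_FOR_INPADDR_ADDRESS / RELOAD_FOR_INPUT_ADDRESS insns, op 1>
	<RELOAD_FOR_INPUT insn, operand 1>
	<the reloaded insn itself>
	<RELOAD_FOR_OUTPUT insn, operand 0>
	<RELOAD_OTHER output reloads, operand 0>

     which is exactly the order described above.)  */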
  emit_insns_before (other_input_address_reload_insns, before_insn);
  emit_insns_before (other_input_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
      emit_insns_before (input_address_reload_insns[j], before_insn);
      emit_insns_before (input_reload_insns[j], before_insn);
    }

  emit_insns_before (other_operand_reload_insns, before_insn);
  emit_insns_before (operand_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (outaddr_address_reload_insns[j], following_insn);
      emit_insns_before (output_address_reload_insns[j], following_insn);
      emit_insns_before (output_reload_insns[j], following_insn);
      emit_insns_before (other_output_reload_insns[j], following_insn);
    }
  /* Move death notes from INSN
     to output-operand-address and output reload insns.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
  {
    rtx insn1;

    /* Loop over those insns, last ones first.  */
    for (insn1 = PREV_INSN (following_insn); insn1 != insn;
	 insn1 = PREV_INSN (insn1))
      if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
	{
	  rtx source = SET_SRC (PATTERN (insn1));
	  rtx dest = SET_DEST (PATTERN (insn1));

	  /* The note we will examine next.  */
	  rtx reg_notes = REG_NOTES (insn);
	  /* The place that pointed to this note.  */
	  rtx *prev_reg_note = &REG_NOTES (insn);

	  /* If the note is for something used in the source of this
	     reload insn, or in the output address, move the note.  */
	  while (reg_notes)
	    {
	      rtx next_reg_notes = XEXP (reg_notes, 1);

	      if (REG_NOTE_KIND (reg_notes) == REG_DEAD
		  && GET_CODE (XEXP (reg_notes, 0)) == REG
		  && ((GET_CODE (dest) != REG
		       && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
							      dest))
		      || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
							     source)))
		{
		  *prev_reg_note = next_reg_notes;
		  XEXP (reg_notes, 1) = REG_NOTES (insn1);
		  REG_NOTES (insn1) = reg_notes;
		}
	      else
		prev_reg_note = &XEXP (reg_notes, 1);

	      reg_notes = next_reg_notes;
	    }
	}
  }
#endif
  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */
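  /* (Editor's illustration, not from the original source: if pseudo 100
     lives in a stack slot and was just reloaded through hard reg 3,
     recording that fact in reg_last_reload_reg[100] lets a later insn that
     needs pseudo 100 reuse hard reg 3 directly instead of emitting another
     load from the stack slot.  The register numbers are made up for the
     example.)  */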
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];
      register int i = reload_spill_index[r];

      /* I is nonneg if this reload used a register.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && reload_reg_rtx[r] != 0)
	{
	  int nr
	    = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
	  int k;
	  int part_reaches_end = 0;
	  int all_reaches_end = 1;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    {
	      if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
					    reload_when_needed[r]))
		part_reaches_end = 1;
	      else
		all_reaches_end = 0;
	    }

	  /* Ignore reloads that don't reach the end of the insn in
	     their entirety.  */
	  if (all_reaches_end)
	    {
	      /* First, clear out memory of what used to be in this spill reg.
		 If consecutive registers are used, clear them all.  */

	      for (k = 0; k < nr; k++)
		CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	      /* Maybe the spill reg contains a copy of reload_out.  */
	      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
		{
		  register int nregno = REGNO (reload_out[r]);
		  int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
			     : HARD_REGNO_NREGS (nregno,
						 GET_MODE (reload_reg_rtx[r])));

		  spill_reg_store[i] = new_spill_reg_store[i];
		  reg_last_reload_reg[nregno] = reload_reg_rtx[r];

		  /* If NREGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (nregno < FIRST_PSEUDO_REGISTER)
		    for (k = 1; k < nnr; k++)
		      reg_last_reload_reg[nregno + k]
			= (nr == nnr
			   ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
					  REGNO (reload_reg_rtx[r]) + k)
			   : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nr; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
		      reg_reloaded_contents[i + k]
			= (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
			   ? nregno
			   : nregno + k);
		      reg_reloaded_insn[i + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
		    }
		}

	      /* Maybe the spill reg contains a copy of reload_in.  Only do
		 something if there will not be an output reload for
		 the register being reloaded.  */
	      else if (reload_out[r] == 0
		       && reload_in[r] != 0
		       && spill_reg_order[i] >= 0
		       && ((GET_CODE (reload_in[r]) == REG
			    && ! reg_has_output_reload[REGNO (reload_in[r])])
			   || (GET_CODE (reload_in_reg[r]) == REG
			       && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
		{
		  register int nregno;
		  int nnr;

		  if (GET_CODE (reload_in[r]) == REG)
		    nregno = REGNO (reload_in[r]);
		  else
		    nregno = REGNO (reload_in_reg[r]);

		  nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
			 : HARD_REGNO_NREGS (nregno,
					     GET_MODE (reload_reg_rtx[r])));

		  reg_last_reload_reg[nregno] = reload_reg_rtx[r];

		  if (nregno < FIRST_PSEUDO_REGISTER)
		    for (k = 1; k < nnr; k++)
		      reg_last_reload_reg[nregno + k]
			= (nr == nnr
			   ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
					  REGNO (reload_reg_rtx[r]) + k)
			   : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.  */
		  if (! reload_inherited[r])
		    spill_reg_store[i] = 0;

		  for (k = 0; k < nr; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
		      reg_reloaded_contents[i + k]
			= (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
			   ? nregno
			   : nregno + k);
		      reg_reloaded_insn[i + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
		    }
		}
	    }

	  /* However, if part of the reload reaches the end, then we must
	     invalidate the old info for the part that survives to the end.  */
	  else if (part_reaches_end)
	    {
	      for (k = 0; k < nr; k++)
		if (reload_reg_reaches_end_p (i + k,
					      reload_opnum[r],
					      reload_when_needed[r]))
		  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
	    }
	}
      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.  */
      if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
	{
	  register int nregno = REGNO (reload_out[r]);
	  if (nregno >= FIRST_PSEUDO_REGISTER)
	    reg_last_reload_reg[nregno] = 0;
	  else
	    {
	      int num_regs = HARD_REGNO_NREGS (nregno,
					       GET_MODE (reload_out[r]));

	      while (num_regs-- > 0)
		reg_last_reload_reg[nregno + num_regs] = 0;
	    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 Use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    {
	      /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
	      return insn;
	    }
	}

      delete_insns_since (last);

      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
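
#if 0
/* Editor's illustrative sketch, not part of the original pass (kept under
   `#if 0' so it is never compiled): the emit-then-validate pattern that
   gen_reload uses above.  A tentative insn is emitted, the recognizer and
   strict constraint checking decide whether the target really accepts it,
   and if not the insn is deleted so the caller can fall back to a more
   conservative sequence.  OUT, OP0 and OP1 stand for already-substituted
   operands; the function name is made up for the example.  */

static int
emit_add_if_valid (out, op0, op1)
     rtx out, op0, op1;
{
  rtx last = get_last_insn ();
  rtx insn = emit_insn (gen_rtx_SET (VOIDmode, out,
				     gen_rtx_PLUS (GET_MODE (out), op0, op1)));
  int code = recog_memoized (insn);

  if (code >= 0)
    {
      insn_extract (insn);
      /* Strict checking: accept only what would still be valid after
	 reload has completed.  */
      if (constrain_operands (code, 1))
	return 1;
    }

  /* The target rejected the tentative insn; remove it again.  */
  delete_insns_since (last);
  return 0;
}
#endif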
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is just a single USE with a REG_EQUAL note in front
	     of INSN, this is no problem, because this mentions just the
	     address that we are using here.
	     But if there is more than one such USE, the insn might use
	     the operand directly, or another reload might do that.
	     This is analogous to the count_occurrences check in the callers.  */
	  int num_occurences = 0;

	  while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE
		 && find_reg_note (i1, REG_EQUAL, NULL_RTX))
	    {
	      num_occurences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (num_occurences == 1 && i1 == insn)
	    break;
	  return;
	}
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some inaccuracies
     in the debugging information are acceptable.
     So we could just delete output_reload_insn.
     But in some cases we can improve the debugging information without
     sacrificing optimization - maybe even improving the code:
     See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  if (reload_out[j] != reload_in[j]
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      /* This might be a basic block head,
		 thus don't use delete_insn.  */
	      PUT_CODE (i2, NOTE);
	      NOTE_SOURCE_FILE (i2) = 0;
	      NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	    }
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
  delete_insn (output_reload_insn);
}
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
				     gen_rtx_PLUS (GET_MODE (incloc),
						   incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }
}
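
/* (Editor's illustration, not from the original source: for a
   post-increment address such as (post_inc (reg:SI 100)) with INC_AMOUNT 4,
   when the location cannot be incremented directly the fallback above
   leaves this sequence before the reloaded insn:

	reloadreg <- r100
	reloadreg <- reloadreg + 4
	r100      <- reloadreg
	reloadreg <- reloadreg + (-4)

   so the insn still sees the pre-increment value in RELOADREG while the
   location itself has already been bumped.  The register number is made up
   for the example.)  */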
/* Return 1 if we are certain that the constraint-string STRING allows
   the hard register REG.  Return 0 if we can't be sure of this.  */

static int
constraint_accepts_reg_p (string, reg)
     char *string;
     rtx reg;
{
  int value = 0;
  int regno = true_regnum (reg);
  int c;

  /* Initialize for first alternative.  */
  value = 0;
  /* Check that each alternative contains `g' or `r'.  */
  while (1)
    switch (c = *string++)
      {
      case 0:
	/* If an alternative lacks `g' or `r', we lose.  */
	return value;
      case ',':
	/* If an alternative lacks `g' or `r', we lose.  */
	if (value == 0)
	  return 0;
	/* Initialize for next alternative.  */
	value = 0;
	break;
      case 'g':
      case 'r':
	/* Any general reg wins for this alternative.  */
	if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
	  value = 1;
	break;
      default:
	/* Any reg in specified class wins for this alternative.  */
	{
	  enum reg_class class = REG_CLASS_FROM_LETTER (c);

	  if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
	    value = 1;
	}
      }
}
/* Return the number of places FIND appears within X, but don't count
   an occurrence if some SET_DEST is FIND.  */

static int
count_occurrences (x, find)
     register rtx x, find;
{
  register int i, j;
  register enum rtx_code code;
  register char *format_ptr;
  int count = 0;

  if (x == find)
    return 1;
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case SET:
      if (SET_DEST (x) == find)
	return count_occurrences (SET_SRC (x), find);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  count += count_occurrences (XEXP (x, i), find);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      for (j = 0; j < XVECLEN (x, i); j++)
		count += count_occurrences (XVECEXP (x, i, j), find);
	    }
	  break;
	}
    }
  return count;
}
/* This array holds values which are equivalent to a hard register
   during reload_cse_regs.  Each array element is an EXPR_LIST of
   values.  Each time a hard register is set, we set the corresponding
   array element to the value.  Each time a hard register is copied
   into memory, we add the memory location to the corresponding array
   element.  We don't store values or memory addresses with side
   effects in this array.

   If the value is a CONST_INT, then the mode of the containing
   EXPR_LIST is the mode in which that CONST_INT was referenced.

   We sometimes clobber a specific entry in a list.  In that case, we
   just set XEXP (list-entry, 0) to 0.  */

static rtx *reg_values;
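
#if 0
/* Editor's illustrative sketch, not used by the pass (kept under `#if 0'
   so it is never compiled): how a reg_values chain is meant to be read.
   Each element of reg_values[REGNO] is an EXPR_LIST whose XEXP (elt, 0)
   is either a value known to be in the register or a memory location
   known to hold a copy of it; a clobbered entry has XEXP (elt, 0) == 0.
   The helper name is made up for the example.  */

static int
hard_reg_known_to_hold_p (regno, val)
     int regno;
     rtx val;
{
  rtx x;

  for (x = reg_values[regno]; x; x = XEXP (x, 1))
    if (XEXP (x, 0) != 0 && rtx_equal_p (XEXP (x, 0), val))
      return 1;
  return 0;
}
#endif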
/* This is a preallocated REG rtx which we use as a temporary in
   reload_cse_invalidate_regno, so that we don't need to allocate a
   new one each time through a loop in that function.  */

static rtx invalidate_regno_rtx;

/* This is a set of registers for which we must remove REG_DEAD notes in
   previous insns, because our modifications made them invalid.  That can
   happen if we introduced the register into the current insn, or we deleted
   the current insn which used to set the register.  */

static HARD_REG_SET no_longer_dead_regs;

/* Invalidate any entries in reg_values which depend on REGNO,
   including those for REGNO itself.  This is called if REGNO is
   changing.  If CLOBBER is true, then always forget anything we
   currently know about REGNO.  MODE is the mode of the assignment to
   REGNO, which is used to determine how many hard registers are being
   changed.  If MODE is VOIDmode, then only REGNO is being changed;
   this is used when invalidating call clobbered registers across a
   call.  */

static void
reload_cse_invalidate_regno (regno, mode, clobber)
     int regno;
     enum machine_mode mode;
     int clobber;
{
  int endregno;
  register int i;

  /* Our callers don't always go through true_regnum; we may see a
     pseudo-register here from a CLOBBER or the like.  We probably
     won't ever see a pseudo-register that has a real register number,
     but we check anyhow for safety.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    regno = reg_renumber[regno];
  if (regno < 0)
    return;

  if (mode == VOIDmode)
    endregno = regno + 1;
  else
    endregno = regno + HARD_REGNO_NREGS (regno, mode);

  if (clobber)
    for (i = regno; i < endregno; i++)
      reg_values[i] = 0;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) != 0
	      && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
	    {
	      /* If this is the only entry on the list, clear
		 reg_values[i].  Otherwise, just clear this entry on
		 the list.  */
	      if (XEXP (x, 1) == 0 && x == reg_values[i])
		{
		  reg_values[i] = 0;
		  break;
		}
	      XEXP (x, 0) = 0;
	    }
	}
    }

  /* We must look at earlier registers, in case REGNO is part of a
     multi word value but is not the first register.  If an earlier
     register has a value in a mode which overlaps REGNO, then we must
     invalidate that earlier register.  Note that we do not need to
     check REGNO or later registers (we must not check REGNO itself,
     because we would incorrectly conclude that there was a conflict).  */

  for (i = 0; i < regno; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	if (XEXP (x, 0) != 0)
	  {
	    PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
	    REGNO (invalidate_regno_rtx) = i;
	    if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
				   NULL_PTR))
	      {
		reload_cse_invalidate_regno (i, VOIDmode, 1);
		break;
	      }
	  }
    }
}
/* The memory at address MEM_BASE is being changed.
   Return whether this change will invalidate VAL.  */

static int
reload_cse_mem_conflict_p (mem_base, val)
     rtx mem_base;
     rtx val;
{
  enum rtx_code code;
  char *fmt;
  int i;

  code = GET_CODE (val);
  switch (code)
    {
      /* Get rid of a few simple cases quickly.  */
    case REG:
    case PC:
    case CC0:
    case SCRATCH:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case MEM:
      if (GET_MODE (mem_base) == BLKmode
	  || GET_MODE (val) == BLKmode)
	return 1;
      if (anti_dependence (val, mem_base))
	return 1;
      /* The address may contain nested MEMs.  */
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (val, i); j++)
	    if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
	      return 1;
	}
    }

  return 0;
}

/* Invalidate any entries in reg_values which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
reload_cse_invalidate_mem (mem_rtx)
     rtx mem_rtx;
{
  register int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) != 0
	      && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
	    {
	      /* If this is the only entry on the list, clear
		 reg_values[i].  Otherwise, just clear this entry on
		 the list.  */
	      if (XEXP (x, 1) == 0 && x == reg_values[i])
		{
		  reg_values[i] = 0;
		  break;
		}
	      XEXP (x, 0) = 0;
	    }
	}
    }
}
/* Invalidate DEST, which is being assigned to or clobbered.  The
   second parameter exists so that this function can be passed to
   note_stores; it is ignored.  */

static void
reload_cse_invalidate_rtx (dest, ignore)
     rtx dest;
     rtx ignore ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == STRICT_LOW_PART
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SUBREG)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
  else if (GET_CODE (dest) == MEM)
    reload_cse_invalidate_mem (dest);
}
/* Possibly delete death notes on the insns before INSN if modifying INSN
   extended the lifespan of the registers.  */

static void
reload_cse_delete_death_notes (insn)
     rtx insn;
{
  int dreg;

  for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
    {
      rtx trial;

      if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
	continue;

      for (trial = prev_nonnote_insn (insn);
	   (trial
	    && GET_CODE (trial) != CODE_LABEL
	    && GET_CODE (trial) != BARRIER);
	   trial = prev_nonnote_insn (trial))
	{
	  if (find_regno_note (trial, REG_DEAD, dreg))
	    {
	      remove_death (dreg, trial);
	      break;
	    }
	}
    }
}

/* Record that the current insn uses hard reg REGNO in mode MODE.  This
   will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
   notes for this register.  */

static void
reload_cse_no_longer_dead (regno, mode)
     int regno;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  while (nregs-- > 0)
    {
      SET_HARD_REG_BIT (no_longer_dead_regs, regno);
      regno++;
    }
}
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

static void
reload_cse_regs (first)
     rtx first;
{
  char *firstobj;
  rtx callmem;
  register int i;
  rtx insn;

  init_alias_analysis ();

  reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
  bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Create our EXPR_LIST structures on reload_obstack, so that we can
     free them when we are done.  */
  push_obstacks (&reload_obstack, &reload_obstack);
  firstobj = (char *) obstack_alloc (&reload_obstack, 0);

  /* We pass this to reload_cse_invalidate_mem to invalidate all of
     memory for a non-const call instruction.  */
  callmem = gen_rtx_MEM (BLKmode, const0_rtx);

  /* This is used in reload_cse_invalidate_regno to avoid consing a
     new REG in a loop in that function.  */
  invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx body;

      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* Forget all the register values at a code label.  We don't
	     try to do anything clever around jumps.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    reg_values[i] = 0;

	  continue;
	}

#ifdef NON_SAVING_SETJMP
      if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	{
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    reg_values[i] = 0;

	  continue;
	}
#endif

      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;

      CLEAR_HARD_REG_SET (no_longer_dead_regs);

      /* If this is a call instruction, forget anything stored in a
	 call clobbered register, or, if this is not a const call, in
	 memory.  */
      if (GET_CODE (insn) == CALL_INSN)
	{
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i])
	      reload_cse_invalidate_regno (i, VOIDmode, 1);

	  if (! CONST_CALL_P (insn))
	    reload_cse_invalidate_mem (callmem);
	}

      body = PATTERN (insn);
      if (GET_CODE (body) == SET)
	{
	  int count = 0;

	  if (reload_cse_noop_set_p (body, insn))
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      reload_cse_delete_death_notes (insn);

	      /* We're done with this insn.  */
	      continue;
	    }

	  /* It's not a no-op, but we can try to simplify it.  */
	  CLEAR_HARD_REG_SET (no_longer_dead_regs);
	  count += reload_cse_simplify_set (body, insn);

	  if (count > 0 && apply_change_group ())
	    reload_cse_delete_death_notes (insn);
	  else if (reload_cse_simplify_operands (insn))
	    reload_cse_delete_death_notes (insn);

	  reload_cse_record_set (body, body);
	}
      else if (GET_CODE (body) == PARALLEL)
	{
	  int count = 0;

	  /* If every action in a PARALLEL is a noop, we can delete
	     the entire PARALLEL.  */
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    if ((GET_CODE (XVECEXP (body, 0, i)) != SET
		 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
		&& GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      break;
	  if (i < 0)
	    {
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      reload_cse_delete_death_notes (insn);

	      /* We're done with this insn.  */
	      continue;
	    }

	  /* It's not a no-op, but we can try to simplify it.  */
	  CLEAR_HARD_REG_SET (no_longer_dead_regs);
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	      count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

	  if (count > 0 && apply_change_group ())
	    reload_cse_delete_death_notes (insn);
	  else if (reload_cse_simplify_operands (insn))
	    reload_cse_delete_death_notes (insn);

	  /* Look through the PARALLEL and record the values being
	     set, if possible.  Also handle any CLOBBERs.  */
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx x = XVECEXP (body, 0, i);

	      if (GET_CODE (x) == SET)
		reload_cse_record_set (x, body);
	      else
		note_stores (x, reload_cse_invalidate_rtx);
	    }
	}
      else
	note_stores (body, reload_cse_invalidate_rtx);

      /* Clobber any registers which appear in REG_INC notes.  We
	 could keep track of the changes to their values, but it is
	 unlikely to help.  */
      {
	rtx x;

	for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	  if (REG_NOTE_KIND (x) == REG_INC)
	    reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
      }

      /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
	 after we have processed the insn.  */
      if (GET_CODE (insn) == CALL_INSN)
	{
	  rtx x;

	  for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	    if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	      reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
	}
    }

  /* Free all the temporary structures we created, and go back to the
     regular obstacks.  */
  obstack_free (&reload_obstack, firstobj);
  pop_obstacks ();
}
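
/* (Editor's illustration, not from the original source: if pseudos 100 and
   101 were both assigned hard register 3 and the function contained
   (set (reg 101) (reg 100)), reload leaves behind (set (reg 3) (reg 3));
   reload_cse_noop_set_p recognizes this and the insn is turned into a
   NOTE_INSN_DELETED note above.  The register numbers are made up for the
   example.)  */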
/* Return whether the values known for REGNO are equal to VAL.  MODE
   is the mode of the object that VAL is being copied to; this matters
   if VAL is a CONST_INT.  */

static int
reload_cse_regno_equal_p (regno, val, mode)
     int regno;
     rtx val;
     enum machine_mode mode;
{
  rtx x;

  for (x = reg_values[regno]; x; x = XEXP (x, 1))
    if (XEXP (x, 0) != 0
	&& rtx_equal_p (XEXP (x, 0), val)
	&& (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
	    || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
	&& (GET_CODE (val) != CONST_INT
	    || mode == GET_MODE (x)
	    || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
		/* On a big endian machine if the value spans more than
		   one register then this register holds the high part of
		   it and we can't use it.

		   ??? We should also compare with the high part of the
		   value.  */
		&& !(WORDS_BIG_ENDIAN
		     && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
		&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
					  GET_MODE_BITSIZE (GET_MODE (x))))))
      return 1;

  return 0;
}
/* See whether a single set is a noop.  SET is the set instruction we
   should check, and INSN is the instruction from which it came.  */

static int
reload_cse_noop_set_p (set, insn)
     rtx set;
     rtx insn;
{
  rtx src, dest;
  enum machine_mode dest_mode;
  int dreg, sreg;
  int ret;

  src = SET_SRC (set);
  dest = SET_DEST (set);
  dest_mode = GET_MODE (dest);

  if (side_effects_p (src))
    return 0;

  dreg = true_regnum (dest);
  sreg = true_regnum (src);

  /* Check for setting a register to itself.  In this case, we don't
     have to worry about REG_DEAD notes.  */
  if (dreg >= 0 && dreg == sreg)
    return 1;

  ret = 0;
  if (dreg >= 0)
    {
      /* Check for setting a register to itself.  */
      if (dreg == sreg)
	ret = 1;

      /* Check for setting a register to a value which we already know
	 is in the register.  */
      else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
	ret = 1;

      /* Check for setting a register DREG to another register SREG
	 where SREG is equal to a value which is already in DREG.  */
      else if (sreg >= 0)
	{
	  rtx x;

	  for (x = reg_values[sreg]; x; x = XEXP (x, 1))
	    {
	      rtx tmp;

	      if (XEXP (x, 0) == 0)
		continue;

	      if (dest_mode == GET_MODE (x))
		tmp = XEXP (x, 0);
	      else if (GET_MODE_BITSIZE (dest_mode)
		       < GET_MODE_BITSIZE (GET_MODE (x)))
		tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
	      else
		continue;

	      if (tmp
		  && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
		{
		  ret = 1;
		  break;
		}
	    }
	}
    }
  else if (GET_CODE (dest) == MEM)
    {
      /* Check for storing a register to memory when we know that the
	 register is equivalent to the memory location.  */
      if (sreg >= 0
	  && reload_cse_regno_equal_p (sreg, dest, dest_mode)
	  && ! side_effects_p (dest))
	ret = 1;
    }

  /* If we can delete this SET, then we need to look for an earlier
     REG_DEAD note on DREG, and remove it if it exists.  */
  if (ret && dreg >= 0)
    {
      if (! find_regno_note (insn, REG_UNUSED, dreg))
	reload_cse_no_longer_dead (dreg, dest_mode);
    }

  return ret;
}
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */

static int
reload_cse_simplify_set (set, insn)
     rtx set;
     rtx insn;
{
  int dreg;
  rtx src;
  enum machine_mode dest_mode;
  enum reg_class dclass;
  register int i;

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (GET_CODE (src) == MEM
      && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
    return 0;

  /* If the constant is cheaper than a register, don't change it.  */
  if (CONSTANT_P (src)
      && rtx_cost (src, SET) < 2)
    return 0;

  dest_mode = GET_MODE (SET_DEST (set));
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i != dreg
	  && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
	  && reload_cse_regno_equal_p (i, src, dest_mode))
	{
	  int validated;

	  /* Pop back to the real obstacks while changing the insn.  */
	  pop_obstacks ();

	  validated = validate_change (insn, &SET_SRC (set),
				       gen_rtx_REG (dest_mode, i), 1);

	  /* Go back to the obstack we are using for temporary
	     storage.  */
	  push_obstacks (&reload_obstack, &reload_obstack);

	  if (validated && ! find_regno_note (insn, REG_UNUSED, i))
	    {
	      reload_cse_no_longer_dead (i, dest_mode);
	      return 1;
	    }
	}
    }
  return 0;
}
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */

static int
reload_cse_simplify_operands (insn)
     rtx insn;
{
#ifdef REGISTER_CONSTRAINTS
  int insn_code_number, n_operands, n_alternatives;
  int i, j;

  char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;
  rtx reg = gen_rtx_REG (VOIDmode, -1);

  /* Find out some information about this insn.  */
  insn_code_number = recog_memoized (insn);
  /* We don't modify asm instructions.  */
  if (insn_code_number < 0)
    return 0;

  n_operands = insn_n_operands[insn_code_number];
  n_alternatives = insn_n_alternatives[insn_code_number];

  if (n_alternatives == 0 || n_operands == 0)
    return 0;
  insn_extract (insn);

  /* Figure out which alternative currently matches.  */
  if (! constrain_operands (insn_code_number, 1))
    fatal_insn_not_found (insn);

  alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
  alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
  alternative_order = (int *) alloca (n_alternatives * sizeof (int));
  bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
  bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));

  for (i = 0; i < n_operands; i++)
    {
      enum machine_mode mode;
      int regno;
      char *p;

      op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
      for (j = 0; j < n_alternatives; j++)
	op_alt_regno[i][j] = -1;

      p = constraints[i] = insn_operand_constraint[insn_code_number][i];
      mode = insn_operand_mode[insn_code_number][i];

      /* Add the reject values for each alternative given by the constraints
	 for this operand.  */
      j = 0;
      while (*p != '\0')
	{
	  char c = *p++;
	  if (c == ',')
	    j++;
	  else if (c == '?')
	    alternative_reject[j] += 3;
	  else if (c == '!')
	    alternative_reject[j] += 300;
	}

      /* We won't change operands which are already registers.  We
	 also don't want to modify output operands.  */
      regno = true_regnum (recog_operand[i]);
      if (regno >= 0
	  || constraints[i][0] == '='
	  || constraints[i][0] == '+')
	continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int class = (int) NO_REGS;

	  if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
	    continue;

	  REGNO (reg) = regno;
	  PUT_MODE (reg, mode);

	  /* We found a register equal to this operand.  Now look for all
	     alternatives that can accept this register and have not been
	     assigned a register they can use yet.  */
	  j = 0;
	  p = constraints[i];
	  for (;;)
	    {
	      char c = *p++;

	      switch (c)
		{
		case '=':  case '+':  case '?':
		case '#':  case '&':  case '!':
		case '0':  case '1':  case '2':  case '3':  case '4':
		case 'm':  case '<':  case '>':  case 'V':  case 'o':
		case 'E':  case 'F':  case 'G':  case 'H':
		case 's':  case 'i':  case 'n':
		case 'I':  case 'J':  case 'K':  case 'L':
		case 'M':  case 'N':  case 'O':  case 'P':
#ifdef EXTRA_CONSTRAINT
		case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
		  /* These don't say anything we care about.  */
		  break;

		case 'g': case 'r':
		  class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
		  break;

		default:
		  class
		    = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
		  break;

		case ',': case '\0':
		  /* See if REGNO fits this alternative, and set it up as the
		     replacement register if we don't have one for this
		     alternative yet and the operand being replaced is not
		     a cheap CONST_INT.  */
		  if (op_alt_regno[i][j] == -1
		      && reg_fits_class_p (reg, class, 0, mode)
		      && (GET_CODE (recog_operand[i]) != CONST_INT
			  || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
		    {
		      alternative_nregs[j]++;
		      op_alt_regno[i][j] = regno;
		    }
		  j++;
		  class = (int) NO_REGS;
		  break;
		}

	      if (c == '\0')
		break;
	    }
	}
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];
      int tmp;

      for (j = i + 1; j < n_alternatives; j++)
	{
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs < best_nregs))
	    {
	      best = j;
	      best_reject = this_reject;
	      best_nregs = this_nregs;
	    }
	}

      tmp = alternative_order[best];
      alternative_order[best] = alternative_order[i];
      alternative_order[i] = tmp;
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];
  CLEAR_HARD_REG_SET (no_longer_dead_regs);

  /* Pop back to the real obstacks while changing the insn.  */
  pop_obstacks ();

  for (i = 0; i < n_operands; i++)
    {
      enum machine_mode mode = insn_operand_mode[insn_code_number][i];
      if (op_alt_regno[i][j] == -1)
	continue;

      reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
      validate_change (insn, recog_operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
    {
      int op = recog_dup_num[i];
      enum machine_mode mode = insn_operand_mode[insn_code_number][op];

      if (op_alt_regno[op][j] == -1)
	continue;

      reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
      validate_change (insn, recog_dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  /* Go back to the obstack we are using for temporary
     storage.  */
  push_obstacks (&reload_obstack, &reload_obstack);

  return apply_change_group ();
#else
  return 0;
#endif
}
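
/* (Editor's illustration, not from the original source: for a constraint
   string such as "r,?m", the scan above charges the second alternative a
   reject value of 3 for its `?' (and 300 for a `!'), so when some hard
   register already holds the operand's value the substitution is steered
   toward the plain "r" alternative unless the insn already matched the
   penalized one.)  */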
/* These two variables are used to pass information from
   reload_cse_record_set to reload_cse_check_clobber.  */

static int reload_cse_check_clobbered;
static rtx reload_cse_check_src;

/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC.  If it does, set
   RELOAD_CSE_CHECK_CLOBBERED.  This is called via note_stores.  The
   second argument, which is passed by note_stores, is ignored.  */

static void
reload_cse_check_clobber (dest, ignore)
     rtx dest;
     rtx ignore ATTRIBUTE_UNUSED;
{
  if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
    reload_cse_check_clobbered = 1;
}
/* Record the result of a SET instruction.  SET is the set pattern.
   BODY is the pattern of the insn that it came from.  */

static void
reload_cse_record_set (set, body)
     rtx set;
     rtx body;
{
  rtx dest, src, x;
  int dreg, sreg, i;
  enum machine_mode dest_mode;

  dest = SET_DEST (set);
  src = SET_SRC (set);
  dreg = true_regnum (dest);
  sreg = true_regnum (src);
  dest_mode = GET_MODE (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
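  /* (Editor's illustration, not from the original source: a push such as
     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 3)) modifies the stack
     pointer even though the stack pointer is not the SET_DEST, so the
     push_operand check below must invalidate anything known about it.)  */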
  x = dest;
  while (GET_CODE (x) == SUBREG
	 || GET_CODE (x) == ZERO_EXTRACT
	 || GET_CODE (x) == SIGN_EXTRACT
	 || GET_CODE (x) == STRICT_LOW_PART)
    x = XEXP (x, 0);
  if (push_operand (x, GET_MODE (x)))
    {
      reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

  /* We can only handle an assignment to a register, or a store of a
     register to a memory location.  For other cases, we just clobber
     the destination.  We also have to just clobber if there are side
     effects in SRC or DEST.  */
  if ((dreg < 0 && GET_CODE (dest) != MEM)
      || side_effects_p (src)
      || side_effects_p (dest))
    {
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

  /* We don't try to handle values involving CC, because it's a pain
     to keep track of when they have to be invalidated.  */
  if (reg_mentioned_p (cc0_rtx, src)
      || reg_mentioned_p (cc0_rtx, dest))
    {
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

  /* If BODY is a PARALLEL, then we need to see whether the source of
     SET is clobbered by some other instruction in the PARALLEL.  */
  if (GET_CODE (body) == PARALLEL)
    {
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  x = XVECEXP (body, 0, i);
	  if (x == set)
	    continue;

	  reload_cse_check_clobbered = 0;
	  reload_cse_check_src = src;
	  note_stores (x, reload_cse_check_clobber);
	  if (reload_cse_check_clobbered)
	    {
	      reload_cse_invalidate_rtx (dest, NULL_RTX);
	      return;
	    }
	}
    }

  if (dreg >= 0)
    {
      /* This is an assignment to a register.  Update the value we
	 have stored for the register.  */
      if (sreg >= 0)
	{
	  /* This is a copy from one register to another.  Any values
	     which were valid for SREG are now valid for DREG.  If the
	     mode changes, we use gen_lowpart_common to extract only
	     the part of the value that is copied.  */
	  reg_values[dreg] = 0;
	  for (x = reg_values[sreg]; x; x = XEXP (x, 1))
	    {
	      rtx tmp;

	      if (XEXP (x, 0) == 0)
		continue;
	      if (dest_mode == GET_MODE (XEXP (x, 0)))
		tmp = XEXP (x, 0);
	      else if (GET_MODE_BITSIZE (dest_mode)
		       > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
		continue;
	      else
		tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
	      if (tmp)
		reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
						      reg_values[dreg]);
	    }
	}
      else
	reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);

      /* We've changed DREG, so invalidate any values held by other
	 registers that depend upon it.  */
      reload_cse_invalidate_regno (dreg, dest_mode, 0);

      /* If this assignment changes more than one hard register,
	 forget anything we know about the others.  */
      for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
	reg_values[dreg + i] = 0;
    }
  else if (GET_CODE (dest) == MEM)
    {
      /* Invalidate conflicting memory locations.  */
      reload_cse_invalidate_mem (dest);

      /* If we're storing a register to memory, add DEST to the list
	 in REG_VALUES.  */
      if (sreg >= 0 && ! side_effects_p (dest))
	reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
					      reg_values[sreg]);
    }
  else
    {
      /* We should have bailed out earlier.  */
      abort ();
    }
}