/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
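
/* In outline, the driver below (reload ()) works roughly as follows.
   This is only a sketch; it leaves out caller-save handling, frame
   growth, register-elimination bookkeeping and the final cleanups:

       record REG_EQUIV equivalences; spill hard regs that cannot
       be eliminated;
       do
	 {
	   set_initial_elim_offsets ();
	   set_initial_label_offsets ();
	   calculate_needs_all_insns (global);   -- find_reloads on each insn
	   select_reload_regs ();                -- may spill more pseudos
	   finish_spills (global);
	 }
       while (something changed);
       reload_as_needed (global);               -- emit the reload insns  */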
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
   collector can keep track of what is inside.  */
varray_type reg_equiv_memory_loc_varray;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static unsigned int *reg_max_ref_width;

/* Element N is the list of insns that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* Vector to remember old contents of reg_renumber before spilling.  */
static short *reg_old_renumber;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Indicate whether the register's current value is one that is not
   safe to retain across a call, even for registers that are normally
   call-saved.  */
static HARD_REG_SET reg_reloaded_call_part_clobbered;
/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This is the register that was stored with spill_reg_store.  This is a
   copy of reload_out / reload_out_reg when the value was stored; if
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.

   ?!?  This is no longer accurate.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that can't be used as spill registers for
   the currently processed insn.  These are the hard registers which are live
   during the insn, but not allocated to pseudos, as well as fixed
   registers.  */
static HARD_REG_SET bad_spill_regs;

/* These are the hard registers that can't be used as spill register for any
   insn.  This includes registers used for user variables and registers that
   we can't eliminate.  A register that appears in this set also can't be used
   to retry register allocation.  */
static HARD_REG_SET bad_spill_regs_global;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.

   Both spill_regs and spill_reg_order are used on two occasions:
   once during find_reload_regs, where they keep track of the spill registers
   for a single insn, but also during reload_as_needed where they show all
   the registers ever used by reload.  For the latter case, the information
   is calculated during finish_spills.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];
/* This vector of reg sets indicates, for each pseudo, which hard registers
   may not be used for retrying global allocation because the register was
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
   a register that it was already allocated to, reload might not
   terminate.  */
static HARD_REG_SET *pseudo_previous_regs;

/* This vector of reg sets indicates, for each pseudo, which hard
   registers may not be used for retrying global allocation because they
   are used as spill registers during one of the insns in which the
   register is live.  */
static HARD_REG_SET *pseudo_forbidden_regs;

/* All hard regs that have been used as spill registers for any insn are
   marked in this set.  */
static HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Record which pseudos needed to be spilled.  */
static regset_head spilled_pseudos;

/* Used for communication between order_regs_for_reload and count_pseudo.
   Used to avoid counting one pseudo twice.  */
static regset_head pseudos_counted;
/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;

/* Points to the beginning of the reload_obstack.  All insn_chain structures
   are allocated first.  */
char *reload_startobj;

/* The point after all insn_chain structures.  Used to quickly deallocate
   memory allocated in copy_reloads during calculate_needs_all_insns.  */
char *reload_firstobj;

/* This points before all local rtl generated by register elimination.
   Used to quickly free all memory after processing one insn.  */
static char *reload_insn_firstobj;

/* List of insn_chain instructions, one for every insn that reload needs to
   examine.  */
struct insn_chain *reload_insn_chain;

/* List of all insns needing reloads.  */
static struct insn_chain *insns_need_reload;
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset; /* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};

static struct elim_table *reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static const struct elim_table_1
{
  const int from;
  const int to;
} reg_eliminate_1[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif
#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If nonzero, we use a new copy of each
   replacement result in any insns encountered.  */
int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* And the number of registers that are equivalent to a constant that
   can be eliminated to frame_pointer / arg_pointer + constant.  */
static int num_eliminable_invariants;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the difference of the
   number of the label and the first label number.  We can't offset the
   pointer itself as this can cause problems on machines with segmented
   memory.  The first table is an array of flags that records whether we
   have yet encountered a label and the second table is an array of arrays,
   one entry in the latter array for each elimination.  */

static int first_label_num;
static char *offsets_known_at;
static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;
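
/* For example, whether any offsets are known at label L is recorded as
   offsets_known_at[CODE_LABEL_NUMBER (L) - first_label_num], and the
   offset of elimination E at that label as
   offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][E]; see
   set_label_offsets and set_offsets_for_label below.  */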
static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
static void maybe_fix_stack_asms (void);
static void copy_reloads (struct insn_chain *);
static void calculate_needs_all_insns (int);
static int find_reg (struct insn_chain *, int);
static void find_reload_regs (struct insn_chain *);
static void select_reload_regs (void);
static void delete_caller_save_insns (void);

static void spill_failure (rtx, enum reg_class);
static void count_spilled_pseudo (int, int, int);
static void delete_dead_insn (rtx);
static void alter_reg (int, int);
static void set_label_offsets (rtx, rtx, int);
static void check_eliminable_occurrences (rtx);
static void elimination_effects (rtx, enum machine_mode);
static int eliminate_regs_in_insn (rtx, int);
static void update_eliminable_offsets (void);
static void mark_not_eliminable (rtx, rtx, void *);
static void set_initial_elim_offsets (void);
static void verify_initial_elim_offsets (void);
static void set_initial_label_offsets (void);
static void set_offsets_for_label (rtx);
static void init_elim_table (void);
static void update_eliminables (HARD_REG_SET *);
static void spill_hard_reg (unsigned int, int);
static int finish_spills (int);
static void scan_paradoxical_subregs (rtx);
static void count_pseudo (int);
static void order_regs_for_reload (struct insn_chain *);
static void reload_as_needed (int);
static void forget_old_reloads_1 (rtx, rtx, void *);
static int reload_reg_class_lower (const void *, const void *);
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
				    enum machine_mode);
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
				     enum machine_mode);
static int reload_reg_free_p (unsigned int, int, enum reload_type);
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
					rtx, rtx, int, int);
static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
			     rtx, rtx, int, int);
static int function_invariant_p (rtx);
static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
static int allocate_reload_reg (struct insn_chain *, int, int);
static int conflicts_with_override (rtx);
static void failed_reload (rtx, int);
static int set_reload_reg (int, int);
static void choose_reload_regs_init (struct insn_chain *, rtx *);
static void choose_reload_regs (struct insn_chain *);
static void merge_assigned_reloads (rtx);
static void emit_input_reload_insns (struct insn_chain *, struct reload *,
				     rtx, int);
static void emit_output_reload_insns (struct insn_chain *, struct reload *,
				      rtx, int);
static void do_input_reload (struct insn_chain *, struct reload *, int);
static void do_output_reload (struct insn_chain *, struct reload *, int);
static bool inherit_piecemeal_p (int, int);
static void emit_reload_insns (struct insn_chain *);
static void delete_output_reload (rtx, int, int);
static void delete_address_reloads (rtx, rtx);
static void delete_address_reloads_1 (rtx, rtx, rtx);
static rtx inc_for_reload (rtx, rtx, rtx, int);
static void add_auto_inc_notes (rtx, rtx);
static void copy_eh_notes (rtx, rtx);
static int reloads_conflict (int, int);
static rtx gen_reload (rtx, rtx, int, enum reload_type);
/* Initialize the reload pass once per compilation.  */

void
init_reload (void)
{
  int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode,
					      LAST_VIRTUAL_REGISTER + 1),
				 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_startobj = obstack_alloc (&reload_obstack, 0);

  INIT_REG_SET (&spilled_pseudos);
  INIT_REG_SET (&pseudos_counted);
  VARRAY_RTX_INIT (reg_equiv_memory_loc_varray, 0, "reg_equiv_memory_loc");
}
/* List of insn chains that are currently unused.  */
static struct insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.  */

struct insn_chain *
new_insn_chain (void)
{
  struct insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
      INIT_REG_SET (&c->live_throughout);
      INIT_REG_SET (&c->dead_or_set);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}
/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */

void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;
  reg_set_iterator rsi;

  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
    {
      int r = reg_renumber[regno];
      int nregs;

      if (r < 0)
	{
	  /* reload_combine uses the information from
	     BASIC_BLOCK->global_live_at_start, which might still
	     contain registers that have not actually been allocated
	     since they have an equivalence.  */
	  gcc_assert (reload_completed);
	}
      else
	{
	  nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
	  while (nregs-- > 0)
	    SET_HARD_REG_BIT (*to, r + nregs);
	}
    }
}
551 /* Replace all pseudos found in LOC with their corresponding
555 replace_pseudos_in (rtx
*loc
, enum machine_mode mem_mode
, rtx usage
)
568 unsigned int regno
= REGNO (x
);
570 if (regno
< FIRST_PSEUDO_REGISTER
)
573 x
= eliminate_regs (x
, mem_mode
, usage
);
577 replace_pseudos_in (loc
, mem_mode
, usage
);
581 if (reg_equiv_constant
[regno
])
582 *loc
= reg_equiv_constant
[regno
];
583 else if (reg_equiv_mem
[regno
])
584 *loc
= reg_equiv_mem
[regno
];
585 else if (reg_equiv_address
[regno
])
586 *loc
= gen_rtx_MEM (GET_MODE (x
), reg_equiv_address
[regno
]);
589 gcc_assert (!REG_P (regno_reg_rtx
[regno
])
590 || REGNO (regno_reg_rtx
[regno
]) != regno
);
591 *loc
= regno_reg_rtx
[regno
];
596 else if (code
== MEM
)
598 replace_pseudos_in (& XEXP (x
, 0), GET_MODE (x
), usage
);
602 /* Process each of our operands recursively. */
603 fmt
= GET_RTX_FORMAT (code
);
604 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
606 replace_pseudos_in (&XEXP (x
, i
), mem_mode
, usage
);
607 else if (*fmt
== 'E')
608 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
609 replace_pseudos_in (& XVECEXP (x
, i
, j
), mem_mode
, usage
);
613 /* Global variables used by reload and its subroutines. */
615 /* Set during calculate_needs if an insn needs register elimination. */
616 static int something_needs_elimination
;
617 /* Set during calculate_needs if an insn needs an operand changed. */
618 int something_needs_operands_changed
;
620 /* Nonzero means we couldn't get enough spill regs. */
623 /* Main entry point for the reload pass.
625 FIRST is the first insn of the function being compiled.
627 GLOBAL nonzero means we were called from global_alloc
628 and should attempt to reallocate any pseudoregs that we
629 displace from hard regs we will use for reloads.
630 If GLOBAL is zero, we do not have enough information to do that,
631 so any pseudo reg that is spilled must go to the stack.
633 Return value is nonzero if reload failed
634 and we must not do any more for this function. */
637 reload (rtx first
, int global
)
641 struct elim_table
*ep
;
644 /* Make sure even insns with volatile mem refs are recognizable. */
649 reload_firstobj
= obstack_alloc (&reload_obstack
, 0);
651 /* Make sure that the last insn in the chain
652 is not something that needs reloading. */
653 emit_note (NOTE_INSN_DELETED
);
655 /* Enable find_equiv_reg to distinguish insns made by reload. */
656 reload_first_uid
= get_max_uid ();
658 #ifdef SECONDARY_MEMORY_NEEDED
659 /* Initialize the secondary memory table. */
660 clear_secondary_mem ();
663 /* We don't have a stack slot for any spill reg yet. */
664 memset (spill_stack_slot
, 0, sizeof spill_stack_slot
);
665 memset (spill_stack_slot_width
, 0, sizeof spill_stack_slot_width
);
667 /* Initialize the save area information for caller-save, in case some
671 /* Compute which hard registers are now in use
672 as homes for pseudo registers.
673 This is done here rather than (eg) in global_alloc
674 because this point is reached even if not optimizing. */
675 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
678 /* A function that receives a nonlocal goto must save all call-saved
680 if (current_function_has_nonlocal_label
)
681 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
682 if (! call_used_regs
[i
] && ! fixed_regs
[i
] && ! LOCAL_REGNO (i
))
683 regs_ever_live
[i
] = 1;
685 #ifdef NON_SAVING_SETJMP
686 /* A function that calls setjmp should save and restore all the
687 call-saved registers on a system where longjmp clobbers them. */
688 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
690 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
691 if (! call_used_regs
[i
])
692 regs_ever_live
[i
] = 1;
696 /* Find all the pseudo registers that didn't get hard regs
697 but do have known equivalent constants or memory slots.
698 These include parameters (known equivalent to parameter slots)
699 and cse'd or loop-moved constant memory addresses.
701 Record constant equivalents in reg_equiv_constant
702 so they will be substituted by find_reloads.
703 Record memory equivalents in reg_mem_equiv so they can
704 be substituted eventually by altering the REG-rtx's. */
706 reg_equiv_constant
= xcalloc (max_regno
, sizeof (rtx
));
707 reg_equiv_mem
= xcalloc (max_regno
, sizeof (rtx
));
708 reg_equiv_init
= xcalloc (max_regno
, sizeof (rtx
));
709 reg_equiv_address
= xcalloc (max_regno
, sizeof (rtx
));
710 reg_max_ref_width
= xcalloc (max_regno
, sizeof (int));
711 reg_old_renumber
= xcalloc (max_regno
, sizeof (short));
712 memcpy (reg_old_renumber
, reg_renumber
, max_regno
* sizeof (short));
713 pseudo_forbidden_regs
= xmalloc (max_regno
* sizeof (HARD_REG_SET
));
714 pseudo_previous_regs
= xcalloc (max_regno
, sizeof (HARD_REG_SET
));
716 CLEAR_HARD_REG_SET (bad_spill_regs_global
);
718 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
719 to. Also find all paradoxical subregs and find largest such for
722 num_eliminable_invariants
= 0;
723 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
725 rtx set
= single_set (insn
);
727 /* We may introduce USEs that we want to remove at the end, so
728 we'll mark them with QImode. Make sure there are no
729 previously-marked insns left by say regmove. */
730 if (INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == USE
731 && GET_MODE (insn
) != VOIDmode
)
732 PUT_MODE (insn
, VOIDmode
);
734 if (set
!= 0 && REG_P (SET_DEST (set
)))
736 rtx note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
);
738 && (! function_invariant_p (XEXP (note
, 0))
740 /* A function invariant is often CONSTANT_P but may
741 include a register. We promise to only pass
742 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
743 || (CONSTANT_P (XEXP (note
, 0))
744 && LEGITIMATE_PIC_OPERAND_P (XEXP (note
, 0)))))
746 rtx x
= XEXP (note
, 0);
747 i
= REGNO (SET_DEST (set
));
748 if (i
> LAST_VIRTUAL_REGISTER
)
750 /* It can happen that a REG_EQUIV note contains a MEM
751 that is not a legitimate memory operand. As later
752 stages of reload assume that all addresses found
753 in the reg_equiv_* arrays were originally legitimate,
754 we ignore such REG_EQUIV notes. */
755 if (memory_operand (x
, VOIDmode
))
757 /* Always unshare the equivalence, so we can
758 substitute into this insn without touching the
760 reg_equiv_memory_loc
[i
] = copy_rtx (x
);
762 else if (function_invariant_p (x
))
764 if (GET_CODE (x
) == PLUS
)
766 /* This is PLUS of frame pointer and a constant,
767 and might be shared. Unshare it. */
768 reg_equiv_constant
[i
] = copy_rtx (x
);
769 num_eliminable_invariants
++;
771 else if (x
== frame_pointer_rtx
772 || x
== arg_pointer_rtx
)
774 reg_equiv_constant
[i
] = x
;
775 num_eliminable_invariants
++;
777 else if (LEGITIMATE_CONSTANT_P (x
))
778 reg_equiv_constant
[i
] = x
;
781 reg_equiv_memory_loc
[i
]
782 = force_const_mem (GET_MODE (SET_DEST (set
)), x
);
783 if (!reg_equiv_memory_loc
[i
])
790 /* If this register is being made equivalent to a MEM
791 and the MEM is not SET_SRC, the equivalencing insn
792 is one with the MEM as a SET_DEST and it occurs later.
793 So don't mark this insn now. */
795 || rtx_equal_p (SET_SRC (set
), x
))
797 = gen_rtx_INSN_LIST (VOIDmode
, insn
, reg_equiv_init
[i
]);
802 /* If this insn is setting a MEM from a register equivalent to it,
803 this is the equivalencing insn. */
804 else if (set
&& MEM_P (SET_DEST (set
))
805 && REG_P (SET_SRC (set
))
806 && reg_equiv_memory_loc
[REGNO (SET_SRC (set
))]
807 && rtx_equal_p (SET_DEST (set
),
808 reg_equiv_memory_loc
[REGNO (SET_SRC (set
))]))
809 reg_equiv_init
[REGNO (SET_SRC (set
))]
810 = gen_rtx_INSN_LIST (VOIDmode
, insn
,
811 reg_equiv_init
[REGNO (SET_SRC (set
))]);
814 scan_paradoxical_subregs (PATTERN (insn
));
819 first_label_num
= get_first_label_num ();
820 num_labels
= max_label_num () - first_label_num
;
822 /* Allocate the tables used to store offset information at labels. */
823 /* We used to use alloca here, but the size of what it would try to
824 allocate would occasionally cause it to exceed the stack limit and
825 cause a core dump. */
826 offsets_known_at
= xmalloc (num_labels
);
827 offsets_at
= xmalloc (num_labels
* NUM_ELIMINABLE_REGS
* sizeof (HOST_WIDE_INT
));
829 /* Alter each pseudo-reg rtx to contain its hard reg number.
830 Assign stack slots to the pseudos that lack hard regs or equivalents.
831 Do not touch virtual registers. */
833 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_regno
; i
++)
836 /* If we have some registers we think can be eliminated, scan all insns to
837 see if there is an insn that sets one of these registers to something
838 other than itself plus a constant. If so, the register cannot be
839 eliminated. Doing this scan here eliminates an extra pass through the
840 main reload loop in the most common case where register elimination
842 for (insn
= first
; insn
&& num_eliminable
; insn
= NEXT_INSN (insn
))
844 note_stores (PATTERN (insn
), mark_not_eliminable
, NULL
);
846 maybe_fix_stack_asms ();
848 insns_need_reload
= 0;
849 something_needs_elimination
= 0;
851 /* Initialize to -1, which means take the first spill register. */
854 /* Spill any hard regs that we know we can't eliminate. */
855 CLEAR_HARD_REG_SET (used_spill_regs
);
856 /* There can be multiple ways to eliminate a register;
857 they should be listed adjacently.
858 Elimination for any register fails only if all possible ways fail. */
859 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; )
862 int can_eliminate
= 0;
865 can_eliminate
|= ep
->can_eliminate
;
868 while (ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
] && ep
->from
== from
);
870 spill_hard_reg (from
, 1);
873 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
874 if (frame_pointer_needed
)
875 spill_hard_reg (HARD_FRAME_POINTER_REGNUM
, 1);
877 finish_spills (global
);
879 /* From now on, we may need to generate moves differently. We may also
880 allow modifications of insns which cause them to not be recognized.
881 Any such modifications will be cleaned up during reload itself. */
882 reload_in_progress
= 1;
884 /* This loop scans the entire function each go-round
885 and repeats until one repetition spills no additional hard regs. */
888 int something_changed
;
891 HOST_WIDE_INT starting_frame_size
;
893 /* Round size of stack frame to stack_alignment_needed. This must be done
894 here because the stack size may be a part of the offset computation
895 for register elimination, and there might have been new stack slots
896 created in the last iteration of this loop. */
897 if (cfun
->stack_alignment_needed
)
898 assign_stack_local (BLKmode
, 0, cfun
->stack_alignment_needed
);
900 starting_frame_size
= get_frame_size ();
902 set_initial_elim_offsets ();
903 set_initial_label_offsets ();
905 /* For each pseudo register that has an equivalent location defined,
906 try to eliminate any eliminable registers (such as the frame pointer)
907 assuming initial offsets for the replacement register, which
910 If the resulting location is directly addressable, substitute
911 the MEM we just got directly for the old REG.
913 If it is not addressable but is a constant or the sum of a hard reg
914 and constant, it is probably not addressable because the constant is
915 out of range, in that case record the address; we will generate
916 hairy code to compute the address in a register each time it is
917 needed. Similarly if it is a hard register, but one that is not
918 valid as an address register.
920 If the location is not addressable, but does not have one of the
921 above forms, assign a stack slot. We have to do this to avoid the
922 potential of producing lots of reloads if, e.g., a location involves
923 a pseudo that didn't get a hard register and has an equivalent memory
924 location that also involves a pseudo that didn't get a hard register.
926 Perhaps at some point we will improve reload_when_needed handling
927 so this problem goes away. But that's very hairy. */
929 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
930 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc
[i
])
932 rtx x
= eliminate_regs (reg_equiv_memory_loc
[i
], 0, NULL_RTX
);
934 if (strict_memory_address_p (GET_MODE (regno_reg_rtx
[i
]),
936 reg_equiv_mem
[i
] = x
, reg_equiv_address
[i
] = 0;
937 else if (CONSTANT_P (XEXP (x
, 0))
938 || (REG_P (XEXP (x
, 0))
939 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
940 || (GET_CODE (XEXP (x
, 0)) == PLUS
941 && REG_P (XEXP (XEXP (x
, 0), 0))
942 && (REGNO (XEXP (XEXP (x
, 0), 0))
943 < FIRST_PSEUDO_REGISTER
)
944 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
945 reg_equiv_address
[i
] = XEXP (x
, 0), reg_equiv_mem
[i
] = 0;
948 /* Make a new stack slot. Then indicate that something
949 changed so we go back and recompute offsets for
950 eliminable registers because the allocation of memory
951 below might change some offset. reg_equiv_{mem,address}
952 will be set up for this pseudo on the next pass around
954 reg_equiv_memory_loc
[i
] = 0;
955 reg_equiv_init
[i
] = 0;
960 if (caller_save_needed
)
963 /* If we allocated another stack slot, redo elimination bookkeeping. */
964 if (starting_frame_size
!= get_frame_size ())
967 if (caller_save_needed
)
969 save_call_clobbered_regs ();
970 /* That might have allocated new insn_chain structures. */
971 reload_firstobj
= obstack_alloc (&reload_obstack
, 0);
974 calculate_needs_all_insns (global
);
976 CLEAR_REG_SET (&spilled_pseudos
);
979 something_changed
= 0;
981 /* If we allocated any new memory locations, make another pass
982 since it might have changed elimination offsets. */
983 if (starting_frame_size
!= get_frame_size ())
984 something_changed
= 1;
987 HARD_REG_SET to_spill
;
988 CLEAR_HARD_REG_SET (to_spill
);
989 update_eliminables (&to_spill
);
990 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
991 if (TEST_HARD_REG_BIT (to_spill
, i
))
993 spill_hard_reg (i
, 1);
996 /* Regardless of the state of spills, if we previously had
997 a register that we thought we could eliminate, but now can
998 not eliminate, we must run another pass.
1000 Consider pseudos which have an entry in reg_equiv_* which
1001 reference an eliminable register. We must make another pass
1002 to update reg_equiv_* so that we do not substitute in the
1003 old value from when we thought the elimination could be
1005 something_changed
= 1;
1009 select_reload_regs ();
1013 if (insns_need_reload
!= 0 || did_spill
)
1014 something_changed
|= finish_spills (global
);
1016 if (! something_changed
)
1019 if (caller_save_needed
)
1020 delete_caller_save_insns ();
1022 obstack_free (&reload_obstack
, reload_firstobj
);
1025 /* If global-alloc was run, notify it of any register eliminations we have
1028 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
1029 if (ep
->can_eliminate
)
1030 mark_elimination (ep
->from
, ep
->to
);
1032 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1033 If that insn didn't set the register (i.e., it copied the register to
1034 memory), just delete that insn instead of the equivalencing insn plus
1035 anything now dead. If we call delete_dead_insn on that insn, we may
1036 delete the insn that actually sets the register if the register dies
1037 there and that is incorrect. */
1039 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1041 if (reg_renumber
[i
] < 0 && reg_equiv_init
[i
] != 0)
1044 for (list
= reg_equiv_init
[i
]; list
; list
= XEXP (list
, 1))
1046 rtx equiv_insn
= XEXP (list
, 0);
1048 /* If we already deleted the insn or if it may trap, we can't
1049 delete it. The latter case shouldn't happen, but can
1050 if an insn has a variable address, gets a REG_EH_REGION
1051 note added to it, and then gets converted into an load
1052 from a constant address. */
1053 if (NOTE_P (equiv_insn
)
1054 || can_throw_internal (equiv_insn
))
1056 else if (reg_set_p (regno_reg_rtx
[i
], PATTERN (equiv_insn
)))
1057 delete_dead_insn (equiv_insn
);
1059 SET_INSN_DELETED (equiv_insn
);
1064 /* Use the reload registers where necessary
1065 by generating move instructions to move the must-be-register
1066 values into or out of the reload registers. */
1068 if (insns_need_reload
!= 0 || something_needs_elimination
1069 || something_needs_operands_changed
)
1071 HOST_WIDE_INT old_frame_size
= get_frame_size ();
1073 reload_as_needed (global
);
1075 gcc_assert (old_frame_size
== get_frame_size ());
1078 verify_initial_elim_offsets ();
1081 /* If we were able to eliminate the frame pointer, show that it is no
1082 longer live at the start of any basic block. If it ls live by
1083 virtue of being in a pseudo, that pseudo will be marked live
1084 and hence the frame pointer will be known to be live via that
1087 if (! frame_pointer_needed
)
1089 CLEAR_REGNO_REG_SET (bb
->global_live_at_start
,
1090 HARD_FRAME_POINTER_REGNUM
);
1092 /* Come here (with failure set nonzero) if we can't get enough spill regs
1093 and we decide not to abort about it. */
1096 CLEAR_REG_SET (&spilled_pseudos
);
1097 reload_in_progress
= 0;
1099 /* Now eliminate all pseudo regs by modifying them into
1100 their equivalent memory references.
1101 The REG-rtx's for the pseudos are modified in place,
1102 so all insns that used to refer to them now refer to memory.
1104 For a reg that has a reg_equiv_address, all those insns
1105 were changed by reloading so that no insns refer to it any longer;
1106 but the DECL_RTL of a variable decl may refer to it,
1107 and if so this causes the debugging info to mention the variable. */
1109 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1113 if (reg_equiv_mem
[i
])
1114 addr
= XEXP (reg_equiv_mem
[i
], 0);
1116 if (reg_equiv_address
[i
])
1117 addr
= reg_equiv_address
[i
];
1121 if (reg_renumber
[i
] < 0)
1123 rtx reg
= regno_reg_rtx
[i
];
1125 REG_USERVAR_P (reg
) = 0;
1126 PUT_CODE (reg
, MEM
);
1127 XEXP (reg
, 0) = addr
;
1128 if (reg_equiv_memory_loc
[i
])
1129 MEM_COPY_ATTRIBUTES (reg
, reg_equiv_memory_loc
[i
]);
1132 MEM_IN_STRUCT_P (reg
) = MEM_SCALAR_P (reg
) = 0;
1133 MEM_ATTRS (reg
) = 0;
1136 else if (reg_equiv_mem
[i
])
1137 XEXP (reg_equiv_mem
[i
], 0) = addr
;
1141 /* We must set reload_completed now since the cleanup_subreg_operands call
1142 below will re-recognize each insn and reload may have generated insns
1143 which are only valid during and after reload. */
1144 reload_completed
= 1;
1146 /* Make a pass over all the insns and delete all USEs which we inserted
1147 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1148 notes. Delete all CLOBBER insns, except those that refer to the return
1149 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1150 from misarranging variable-array code, and simplify (subreg (reg))
1151 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1152 are no longer useful or accurate. Strip and regenerate REG_INC notes
1153 that may have been moved around. */
1155 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1161 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn
),
1162 VOIDmode
, CALL_INSN_FUNCTION_USAGE (insn
));
1164 if ((GET_CODE (PATTERN (insn
)) == USE
1165 /* We mark with QImode USEs introduced by reload itself. */
1166 && (GET_MODE (insn
) == QImode
1167 || find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
1168 || (GET_CODE (PATTERN (insn
)) == CLOBBER
1169 && (!MEM_P (XEXP (PATTERN (insn
), 0))
1170 || GET_MODE (XEXP (PATTERN (insn
), 0)) != BLKmode
1171 || (GET_CODE (XEXP (XEXP (PATTERN (insn
), 0), 0)) != SCRATCH
1172 && XEXP (XEXP (PATTERN (insn
), 0), 0)
1173 != stack_pointer_rtx
))
1174 && (!REG_P (XEXP (PATTERN (insn
), 0))
1175 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn
), 0)))))
1181 /* Some CLOBBERs may survive until here and still reference unassigned
1182 pseudos with const equivalent, which may in turn cause ICE in later
1183 passes if the reference remains in place. */
1184 if (GET_CODE (PATTERN (insn
)) == CLOBBER
)
1185 replace_pseudos_in (& XEXP (PATTERN (insn
), 0),
1186 VOIDmode
, PATTERN (insn
));
1188 pnote
= ®_NOTES (insn
);
1191 if (REG_NOTE_KIND (*pnote
) == REG_DEAD
1192 || REG_NOTE_KIND (*pnote
) == REG_UNUSED
1193 || REG_NOTE_KIND (*pnote
) == REG_INC
1194 || REG_NOTE_KIND (*pnote
) == REG_RETVAL
1195 || REG_NOTE_KIND (*pnote
) == REG_LIBCALL
)
1196 *pnote
= XEXP (*pnote
, 1);
1198 pnote
= &XEXP (*pnote
, 1);
1202 add_auto_inc_notes (insn
, PATTERN (insn
));
1205 /* And simplify (subreg (reg)) if it appears as an operand. */
1206 cleanup_subreg_operands (insn
);
1209 /* If we are doing stack checking, give a warning if this function's
1210 frame size is larger than we expect. */
1211 if (flag_stack_check
&& ! STACK_CHECK_BUILTIN
)
1213 HOST_WIDE_INT size
= get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE
;
1214 static int verbose_warned
= 0;
1216 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1217 if (regs_ever_live
[i
] && ! fixed_regs
[i
] && call_used_regs
[i
])
1218 size
+= UNITS_PER_WORD
;
1220 if (size
> STACK_CHECK_MAX_FRAME_SIZE
)
1222 warning ("frame size too large for reliable stack checking");
1223 if (! verbose_warned
)
1225 warning ("try reducing the number of local variables");
1231 /* Indicate that we no longer have known memory locations or constants. */
1232 if (reg_equiv_constant
)
1233 free (reg_equiv_constant
);
1234 reg_equiv_constant
= 0;
1235 VARRAY_GROW (reg_equiv_memory_loc_varray
, 0);
1236 reg_equiv_memory_loc
= 0;
1238 if (offsets_known_at
)
1239 free (offsets_known_at
);
1243 free (reg_equiv_mem
);
1244 free (reg_equiv_init
);
1245 free (reg_equiv_address
);
1246 free (reg_max_ref_width
);
1247 free (reg_old_renumber
);
1248 free (pseudo_previous_regs
);
1249 free (pseudo_forbidden_regs
);
1251 CLEAR_HARD_REG_SET (used_spill_regs
);
1252 for (i
= 0; i
< n_spills
; i
++)
1253 SET_HARD_REG_BIT (used_spill_regs
, spill_regs
[i
]);
1255 /* Free all the insn_chain structures at once. */
1256 obstack_free (&reload_obstack
, reload_startobj
);
1257 unused_insn_chains
= 0;
1258 fixup_abnormal_edges ();
1260 /* Replacing pseudos with their memory equivalents might have
1261 created shared rtx. Subsequent passes would get confused
1262 by this, so unshare everything here. */
1263 unshare_all_rtl_again (first
);
1265 #ifdef STACK_BOUNDARY
1266 /* init_emit has set the alignment of the hard frame pointer
1267 to STACK_BOUNDARY. It is very likely no longer valid if
1268 the hard frame pointer was used for register allocation. */
1269 if (!frame_pointer_needed
)
1270 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = BITS_PER_UNIT
;
1276 /* Yet another special case. Unfortunately, reg-stack forces people to
1277 write incorrect clobbers in asm statements. These clobbers must not
1278 cause the register to appear in bad_spill_regs, otherwise we'll call
1279 fatal_insn later. We clear the corresponding regnos in the live
1280 register sets to avoid this.
1281 The whole thing is rather sick, I'm afraid. */
1284 maybe_fix_stack_asms (void)
1287 const char *constraints
[MAX_RECOG_OPERANDS
];
1288 enum machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
1289 struct insn_chain
*chain
;
1291 for (chain
= reload_insn_chain
; chain
!= 0; chain
= chain
->next
)
1294 HARD_REG_SET clobbered
, allowed
;
1297 if (! INSN_P (chain
->insn
)
1298 || (noperands
= asm_noperands (PATTERN (chain
->insn
))) < 0)
1300 pat
= PATTERN (chain
->insn
);
1301 if (GET_CODE (pat
) != PARALLEL
)
1304 CLEAR_HARD_REG_SET (clobbered
);
1305 CLEAR_HARD_REG_SET (allowed
);
1307 /* First, make a mask of all stack regs that are clobbered. */
1308 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1310 rtx t
= XVECEXP (pat
, 0, i
);
1311 if (GET_CODE (t
) == CLOBBER
&& STACK_REG_P (XEXP (t
, 0)))
1312 SET_HARD_REG_BIT (clobbered
, REGNO (XEXP (t
, 0)));
1315 /* Get the operand values and constraints out of the insn. */
1316 decode_asm_operands (pat
, recog_data
.operand
, recog_data
.operand_loc
,
1317 constraints
, operand_mode
);
1319 /* For every operand, see what registers are allowed. */
1320 for (i
= 0; i
< noperands
; i
++)
1322 const char *p
= constraints
[i
];
1323 /* For every alternative, we compute the class of registers allowed
1324 for reloading in CLS, and merge its contents into the reg set
1326 int cls
= (int) NO_REGS
;
1332 if (c
== '\0' || c
== ',' || c
== '#')
1334 /* End of one alternative - mark the regs in the current
1335 class, and reset the class. */
1336 IOR_HARD_REG_SET (allowed
, reg_class_contents
[cls
]);
1342 } while (c
!= '\0' && c
!= ',');
1350 case '=': case '+': case '*': case '%': case '?': case '!':
1351 case '0': case '1': case '2': case '3': case '4': case 'm':
1352 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1353 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1354 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1359 cls
= (int) reg_class_subunion
[cls
]
1360 [(int) MODE_BASE_REG_CLASS (VOIDmode
)];
1365 cls
= (int) reg_class_subunion
[cls
][(int) GENERAL_REGS
];
1369 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
1370 cls
= (int) reg_class_subunion
[cls
]
1371 [(int) MODE_BASE_REG_CLASS (VOIDmode
)];
1373 cls
= (int) reg_class_subunion
[cls
]
1374 [(int) REG_CLASS_FROM_CONSTRAINT (c
, p
)];
1376 p
+= CONSTRAINT_LEN (c
, p
);
1379 /* Those of the registers which are clobbered, but allowed by the
1380 constraints, must be usable as reload registers. So clear them
1381 out of the life information. */
1382 AND_HARD_REG_SET (allowed
, clobbered
);
1383 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1384 if (TEST_HARD_REG_BIT (allowed
, i
))
1386 CLEAR_REGNO_REG_SET (&chain
->live_throughout
, i
);
1387 CLEAR_REGNO_REG_SET (&chain
->dead_or_set
, i
);
1394 /* Copy the global variables n_reloads and rld into the corresponding elts
1397 copy_reloads (struct insn_chain
*chain
)
1399 chain
->n_reloads
= n_reloads
;
1400 chain
->rld
= obstack_alloc (&reload_obstack
,
1401 n_reloads
* sizeof (struct reload
));
1402 memcpy (chain
->rld
, rld
, n_reloads
* sizeof (struct reload
));
1403 reload_insn_firstobj
= obstack_alloc (&reload_obstack
, 0);
1406 /* Walk the chain of insns, and determine for each whether it needs reloads
1407 and/or eliminations. Build the corresponding insns_need_reload list, and
1408 set something_needs_elimination as appropriate. */
1410 calculate_needs_all_insns (int global
)
1412 struct insn_chain
**pprev_reload
= &insns_need_reload
;
1413 struct insn_chain
*chain
, *next
= 0;
1415 something_needs_elimination
= 0;
1417 reload_insn_firstobj
= obstack_alloc (&reload_obstack
, 0);
1418 for (chain
= reload_insn_chain
; chain
!= 0; chain
= next
)
1420 rtx insn
= chain
->insn
;
1424 /* Clear out the shortcuts. */
1425 chain
->n_reloads
= 0;
1426 chain
->need_elim
= 0;
1427 chain
->need_reload
= 0;
1428 chain
->need_operand_change
= 0;
1430 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1431 include REG_LABEL), we need to see what effects this has on the
1432 known offsets at labels. */
1434 if (LABEL_P (insn
) || JUMP_P (insn
)
1435 || (INSN_P (insn
) && REG_NOTES (insn
) != 0))
1436 set_label_offsets (insn
, insn
, 0);
1440 rtx old_body
= PATTERN (insn
);
1441 int old_code
= INSN_CODE (insn
);
1442 rtx old_notes
= REG_NOTES (insn
);
1443 int did_elimination
= 0;
1444 int operands_changed
= 0;
1445 rtx set
= single_set (insn
);
1447 /* Skip insns that only set an equivalence. */
1448 if (set
&& REG_P (SET_DEST (set
))
1449 && reg_renumber
[REGNO (SET_DEST (set
))] < 0
1450 && reg_equiv_constant
[REGNO (SET_DEST (set
))])
1453 /* If needed, eliminate any eliminable registers. */
1454 if (num_eliminable
|| num_eliminable_invariants
)
1455 did_elimination
= eliminate_regs_in_insn (insn
, 0);
1457 /* Analyze the instruction. */
1458 operands_changed
= find_reloads (insn
, 0, spill_indirect_levels
,
1459 global
, spill_reg_order
);
1461 /* If a no-op set needs more than one reload, this is likely
1462 to be something that needs input address reloads. We
1463 can't get rid of this cleanly later, and it is of no use
1464 anyway, so discard it now.
1465 We only do this when expensive_optimizations is enabled,
1466 since this complements reload inheritance / output
1467 reload deletion, and it can make debugging harder. */
1468 if (flag_expensive_optimizations
&& n_reloads
> 1)
1470 rtx set
= single_set (insn
);
1472 && SET_SRC (set
) == SET_DEST (set
)
1473 && REG_P (SET_SRC (set
))
1474 && REGNO (SET_SRC (set
)) >= FIRST_PSEUDO_REGISTER
)
1477 /* Delete it from the reload chain. */
1479 chain
->prev
->next
= next
;
1481 reload_insn_chain
= next
;
1483 next
->prev
= chain
->prev
;
1484 chain
->next
= unused_insn_chains
;
1485 unused_insn_chains
= chain
;
1490 update_eliminable_offsets ();
1492 /* Remember for later shortcuts which insns had any reloads or
1493 register eliminations. */
1494 chain
->need_elim
= did_elimination
;
1495 chain
->need_reload
= n_reloads
> 0;
1496 chain
->need_operand_change
= operands_changed
;
1498 /* Discard any register replacements done. */
1499 if (did_elimination
)
1501 obstack_free (&reload_obstack
, reload_insn_firstobj
);
1502 PATTERN (insn
) = old_body
;
1503 INSN_CODE (insn
) = old_code
;
1504 REG_NOTES (insn
) = old_notes
;
1505 something_needs_elimination
= 1;
1508 something_needs_operands_changed
|= operands_changed
;
1512 copy_reloads (chain
);
1513 *pprev_reload
= chain
;
1514 pprev_reload
= &chain
->next_need_reload
;
1521 /* Comparison function for qsort to decide which of two reloads
1522 should be handled first. *P1 and *P2 are the reload numbers. */
1525 reload_reg_class_lower (const void *r1p
, const void *r2p
)
1527 int r1
= *(const short *) r1p
, r2
= *(const short *) r2p
;
1530 /* Consider required reloads before optional ones. */
1531 t
= rld
[r1
].optional
- rld
[r2
].optional
;
1535 /* Count all solitary classes before non-solitary ones. */
1536 t
= ((reg_class_size
[(int) rld
[r2
].class] == 1)
1537 - (reg_class_size
[(int) rld
[r1
].class] == 1));
1541 /* Aside from solitaires, consider all multi-reg groups first. */
1542 t
= rld
[r2
].nregs
- rld
[r1
].nregs
;
1546 /* Consider reloads in order of increasing reg-class number. */
1547 t
= (int) rld
[r1
].class - (int) rld
[r2
].class;
1551 /* If reloads are equally urgent, sort by reload number,
1552 so that the results of qsort leave nothing to chance. */
1556 /* The cost of spilling each hard reg. */
1557 static int spill_cost
[FIRST_PSEUDO_REGISTER
];
1559 /* When spilling multiple hard registers, we use SPILL_COST for the first
1560 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1561 only the first hard reg for a multi-reg pseudo. */
1562 static int spill_add_cost
[FIRST_PSEUDO_REGISTER
];
1564 /* Update the spill cost arrays, considering that pseudo REG is live. */
1567 count_pseudo (int reg
)
1569 int freq
= REG_FREQ (reg
);
1570 int r
= reg_renumber
[reg
];
1573 if (REGNO_REG_SET_P (&pseudos_counted
, reg
)
1574 || REGNO_REG_SET_P (&spilled_pseudos
, reg
))
1577 SET_REGNO_REG_SET (&pseudos_counted
, reg
);
1579 gcc_assert (r
>= 0);
1581 spill_add_cost
[r
] += freq
;
1583 nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
1585 spill_cost
[r
+ nregs
] += freq
;
1588 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1589 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1592 order_regs_for_reload (struct insn_chain
*chain
)
1595 HARD_REG_SET used_by_pseudos
;
1596 HARD_REG_SET used_by_pseudos2
;
1597 reg_set_iterator rsi
;
1599 COPY_HARD_REG_SET (bad_spill_regs
, fixed_reg_set
);
1601 memset (spill_cost
, 0, sizeof spill_cost
);
1602 memset (spill_add_cost
, 0, sizeof spill_add_cost
);
1604 /* Count number of uses of each hard reg by pseudo regs allocated to it
1605 and then order them by decreasing use. First exclude hard registers
1606 that are live in or across this insn. */
1608 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
1609 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
1610 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos
);
1611 IOR_HARD_REG_SET (bad_spill_regs
, used_by_pseudos2
);
1613 /* Now find out which pseudos are allocated to it, and update
1615 CLEAR_REG_SET (&pseudos_counted
);
1617 EXECUTE_IF_SET_IN_REG_SET
1618 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1622 EXECUTE_IF_SET_IN_REG_SET
1623 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
1627 CLEAR_REG_SET (&pseudos_counted
);
1630 /* Vector of reload-numbers showing the order in which the reloads should
1632 static short reload_order
[MAX_RELOADS
];
1634 /* This is used to keep track of the spill regs used in one insn. */
1635 static HARD_REG_SET used_spill_regs_local
;
1637 /* We decided to spill hard register SPILLED, which has a size of
1638 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1639 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1640 update SPILL_COST/SPILL_ADD_COST. */
1643 count_spilled_pseudo (int spilled
, int spilled_nregs
, int reg
)
1645 int r
= reg_renumber
[reg
];
1646 int nregs
= hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (reg
)];
1648 if (REGNO_REG_SET_P (&spilled_pseudos
, reg
)
1649 || spilled
+ spilled_nregs
<= r
|| r
+ nregs
<= spilled
)
1652 SET_REGNO_REG_SET (&spilled_pseudos
, reg
);
1654 spill_add_cost
[r
] -= REG_FREQ (reg
);
1656 spill_cost
[r
+ nregs
] -= REG_FREQ (reg
);
1659 /* Find reload register to use for reload number ORDER. */
1662 find_reg (struct insn_chain
*chain
, int order
)
1664 int rnum
= reload_order
[order
];
1665 struct reload
*rl
= rld
+ rnum
;
1666 int best_cost
= INT_MAX
;
1670 HARD_REG_SET not_usable
;
1671 HARD_REG_SET used_by_other_reload
;
1672 reg_set_iterator rsi
;
1674 COPY_HARD_REG_SET (not_usable
, bad_spill_regs
);
1675 IOR_HARD_REG_SET (not_usable
, bad_spill_regs_global
);
1676 IOR_COMPL_HARD_REG_SET (not_usable
, reg_class_contents
[rl
->class]);
1678 CLEAR_HARD_REG_SET (used_by_other_reload
);
1679 for (k
= 0; k
< order
; k
++)
1681 int other
= reload_order
[k
];
1683 if (rld
[other
].regno
>= 0 && reloads_conflict (other
, rnum
))
1684 for (j
= 0; j
< rld
[other
].nregs
; j
++)
1685 SET_HARD_REG_BIT (used_by_other_reload
, rld
[other
].regno
+ j
);
1688 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1690 unsigned int regno
= i
;
1692 if (! TEST_HARD_REG_BIT (not_usable
, regno
)
1693 && ! TEST_HARD_REG_BIT (used_by_other_reload
, regno
)
1694 && HARD_REGNO_MODE_OK (regno
, rl
->mode
))
1696 int this_cost
= spill_cost
[regno
];
1698 unsigned int this_nregs
= hard_regno_nregs
[regno
][rl
->mode
];
1700 for (j
= 1; j
< this_nregs
; j
++)
1702 this_cost
+= spill_add_cost
[regno
+ j
];
1703 if ((TEST_HARD_REG_BIT (not_usable
, regno
+ j
))
1704 || TEST_HARD_REG_BIT (used_by_other_reload
, regno
+ j
))
1709 if (rl
->in
&& REG_P (rl
->in
) && REGNO (rl
->in
) == regno
)
1711 if (rl
->out
&& REG_P (rl
->out
) && REGNO (rl
->out
) == regno
)
1713 if (this_cost
< best_cost
1714 /* Among registers with equal cost, prefer caller-saved ones, or
1715 use REG_ALLOC_ORDER if it is defined. */
1716 || (this_cost
== best_cost
1717 #ifdef REG_ALLOC_ORDER
1718 && (inv_reg_alloc_order
[regno
]
1719 < inv_reg_alloc_order
[best_reg
])
1721 && call_used_regs
[regno
]
1722 && ! call_used_regs
[best_reg
]
1727 best_cost
= this_cost
;
1735 fprintf (dump_file
, "Using reg %d for reload %d\n", best_reg
, rnum
);
1737 rl
->nregs
= hard_regno_nregs
[best_reg
][rl
->mode
];
1738 rl
->regno
= best_reg
;
1740 EXECUTE_IF_SET_IN_REG_SET
1741 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1743 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1746 EXECUTE_IF_SET_IN_REG_SET
1747 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, j
, rsi
)
1749 count_spilled_pseudo (best_reg
, rl
->nregs
, j
);
1752 for (i
= 0; i
< rl
->nregs
; i
++)
1754 gcc_assert (spill_cost
[best_reg
+ i
] == 0);
1755 gcc_assert (spill_add_cost
[best_reg
+ i
] == 0);
1756 SET_HARD_REG_BIT (used_spill_regs_local
, best_reg
+ i
);
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
        {
          int regno = REGNO (chain->rld[i].reg_rtx);
          chain->rld[i].regno = regno;
          chain->rld[i].nregs
            = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
        }
      else
        chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
          && ! rld[r].optional
          && rld[r].regno == -1)
        if (! find_reg (chain, i))
          {
            spill_failure (chain->insn, rld[r].class);
            failure = 1;
            return;
          }
    }

  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
static void
select_reload_regs (void)
{
  struct insn_chain *chain;

  /* Try to satisfy the needs for each insn.  */
  for (chain = insns_need_reload; chain != 0;
       chain = chain->next_need_reload)
    find_reload_regs (chain);
}
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  */
static void
delete_caller_save_insns (void)
1843 struct insn_chain
*c
= reload_insn_chain
;
1847 while (c
!= 0 && c
->is_caller_save_insn
)
1849 struct insn_chain
*next
= c
->next
;
1852 if (c
== reload_insn_chain
)
1853 reload_insn_chain
= next
;
1857 next
->prev
= c
->prev
;
1859 c
->prev
->next
= next
;
1860 c
->next
= unused_insn_chains
;
1861 unused_insn_chains
= c
;
/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.  */

static void
spill_failure (rtx insn, enum reg_class class)
{
  static const char *const reg_class_names[] = REG_CLASS_NAMES;
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "can't find a register in class %qs while "
                   "reloading %<asm%>",
                   reg_class_names[class]);
  else
    {
      error ("unable to find a register to spill in class %qs",
             reg_class_names[class]);
      fatal_insn ("this is the insn:", insn);
    }
}
/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
   data that is dead in INSN.  */

static void
delete_dead_insn (rtx insn)
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    delete_dead_insn (prev);

  SET_INSN_DELETED (insn);
}
1909 /* Modify the home of pseudo-reg I.
1910 The new home is present in reg_renumber[I].
1912 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1913 or it may be -1, meaning there is none or it is not relevant.
1914 This is used so that all pseudos spilled from a given hard reg
1915 can share one stack slot. */
1918 alter_reg (int i
, int from_reg
)
1920 /* When outputting an inline function, this can happen
1921 for a reg that isn't actually used. */
1922 if (regno_reg_rtx
[i
] == 0)
  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
1927 if (!REG_P (regno_reg_rtx
[i
]))
1930 /* Modify the reg-rtx to contain the new hard reg
1931 number or else to contain its pseudo reg number. */
1932 REGNO (regno_reg_rtx
[i
])
1933 = reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
;
1935 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1936 allocate a stack slot for it. */
1938 if (reg_renumber
[i
] < 0
1939 && REG_N_REFS (i
) > 0
1940 && reg_equiv_constant
[i
] == 0
1941 && reg_equiv_memory_loc
[i
] == 0)
1944 unsigned int inherent_size
= PSEUDO_REGNO_BYTES (i
);
1945 unsigned int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
1948 /* Each pseudo reg has an inherent size which comes from its own mode,
1949 and a total size which provides room for paradoxical subregs
1950 which refer to the pseudo reg in wider modes.
1952 We can use a slot already allocated if it provides both
1953 enough inherent space and enough total space.
1954 Otherwise, we allocate a new slot, making sure that it has no less
     inherent space, and no less total space, than the previous slot.  */
1958 /* No known place to spill from => no slot to reuse. */
1959 x
= assign_stack_local (GET_MODE (regno_reg_rtx
[i
]), total_size
,
1960 inherent_size
== total_size
? 0 : -1);
1961 if (BYTES_BIG_ENDIAN
)
1962 /* Cancel the big-endian correction done in assign_stack_local.
1963 Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
1966 adjust
= inherent_size
- total_size
;
1968 /* Nothing can alias this slot except this pseudo. */
1969 set_mem_alias_set (x
, new_alias_set ());
1972 /* Reuse a stack slot if possible. */
1973 else if (spill_stack_slot
[from_reg
] != 0
1974 && spill_stack_slot_width
[from_reg
] >= total_size
1975 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
1977 x
= spill_stack_slot
[from_reg
];
1979 /* Allocate a bigger slot. */
1982 /* Compute maximum size needed, both for inherent size
1983 and for total size. */
1984 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
1987 if (spill_stack_slot
[from_reg
])
1989 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
1991 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
1992 if (spill_stack_slot_width
[from_reg
] > total_size
)
1993 total_size
= spill_stack_slot_width
[from_reg
];
1996 /* Make a slot with that size. */
1997 x
= assign_stack_local (mode
, total_size
,
1998 inherent_size
== total_size
? 0 : -1);
2001 /* All pseudos mapped to this slot can alias each other. */
2002 if (spill_stack_slot
[from_reg
])
2003 set_mem_alias_set (x
, MEM_ALIAS_SET (spill_stack_slot
[from_reg
]));
2005 set_mem_alias_set (x
, new_alias_set ());
2007 if (BYTES_BIG_ENDIAN
)
2009 /* Cancel the big-endian correction done in assign_stack_local.
2010 Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
2013 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2016 = adjust_address_nv (x
, mode_for_size (total_size
2022 spill_stack_slot
[from_reg
] = stack_slot
;
2023 spill_stack_slot_width
[from_reg
] = total_size
;
2026 /* On a big endian machine, the "address" of the slot
2027 is the address of the low part that fits its inherent mode. */
2028 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2029 adjust
+= (total_size
- inherent_size
);
2031 /* If we have any adjustment to make, or if the stack slot is the
2032 wrong mode, make a new stack slot. */
2033 x
= adjust_address_nv (x
, GET_MODE (regno_reg_rtx
[i
]), adjust
);
2035 /* If we have a decl for the original register, set it for the
2036 memory. If this is a shared MEM, make a copy. */
2037 if (REG_EXPR (regno_reg_rtx
[i
])
2038 && DECL_P (REG_EXPR (regno_reg_rtx
[i
])))
2040 rtx decl
= DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx
[i
]));
	 /* We can do this only for the DECL's home pseudo, not for
	    any copies of it, since otherwise when the stack slot
	    is reused, nonoverlapping_memrefs_p might think they
	    cannot conflict.  */
2046 if (decl
&& REG_P (decl
) && REGNO (decl
) == (unsigned) i
)
2048 if (from_reg
!= -1 && spill_stack_slot
[from_reg
] == x
)
2051 set_mem_attrs_from_reg (x
, regno_reg_rtx
[i
]);
2055 /* Save the stack slot for later. */
2056 reg_equiv_memory_loc
[i
] = x
;
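/* Editor's illustrative sketch, not part of the reload pass: the slot-reuse
   test in alter_reg above accepts an existing spill slot only when it offers
   both enough total room (for paradoxical subregs) and enough room in the
   slot's own mode (the inherent size).  With the sizes passed in as plain
   integers, the decision reduces to this; the name is ours.  */

static int
example_can_reuse_spill_slot (unsigned int slot_width,
                              unsigned int slot_mode_size,
                              unsigned int inherent_size,
                              unsigned int total_size)
{
  return slot_width >= total_size && slot_mode_size >= inherent_size;
}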
/* Mark the slots in regs_ever_live for the hard regs
   used by pseudo-reg number REGNO.  */

void
mark_home_live (int regno)
{
  int i, lim;

  i = reg_renumber[regno];
  if (i < 0)
    return;
  lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
  while (i < lim)
    regs_ever_live[i++] = 1;
}
2076 /* This function handles the tracking of elimination offsets around branches.
2078 X is a piece of RTL being scanned.
2080 INSN is the insn that it came from, if any.
2082 INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */
2087 set_label_offsets (rtx x
, rtx insn
, int initial_p
)
2089 enum rtx_code code
= GET_CODE (x
);
2092 struct elim_table
*p
;
2097 if (LABEL_REF_NONLOCAL_P (x
))
2102 /* ... fall through ... */
2105 /* If we know nothing about this label, set the desired offsets. Note
2106 that this sets the offset at a label to be the offset before a label
2107 if we don't know anything about the label. This is not correct for
2108 the label after a BARRIER, but is the best guess we can make. If
2109 we guessed wrong, we will suppress an elimination that might have
2110 been possible had we been able to guess correctly. */
2112 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
) - first_label_num
])
2114 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2115 offsets_at
[CODE_LABEL_NUMBER (x
) - first_label_num
][i
]
2116 = (initial_p
? reg_eliminate
[i
].initial_offset
2117 : reg_eliminate
[i
].offset
);
2118 offsets_known_at
[CODE_LABEL_NUMBER (x
) - first_label_num
] = 1;
      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */
2126 && (tem
= prev_nonnote_insn (insn
)) != 0
2128 set_offsets_for_label (insn
);
2130 /* If neither of the above cases is true, compare each offset
2131 with those previously recorded and suppress any eliminations
2132 where the offsets disagree. */
2134 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2135 if (offsets_at
[CODE_LABEL_NUMBER (x
) - first_label_num
][i
]
2136 != (initial_p
? reg_eliminate
[i
].initial_offset
2137 : reg_eliminate
[i
].offset
))
2138 reg_eliminate
[i
].can_eliminate
= 0;
2143 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2145 /* ... fall through ... */
2149 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2150 and hence must have all eliminations at their initial offsets. */
2151 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2152 if (REG_NOTE_KIND (tem
) == REG_LABEL
)
2153 set_label_offsets (XEXP (tem
, 0), insn
, 1);
2159 /* Each of the labels in the parallel or address vector must be
2160 at their initial offsets. We want the first field for PARALLEL
2161 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2163 for (i
= 0; i
< (unsigned) XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2164 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2169 /* We only care about setting PC. If the source is not RETURN,
2170 IF_THEN_ELSE, or a label, disable any eliminations not at
2171 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2172 isn't one of those possibilities. For branches to a label,
2173 call ourselves recursively.
2175 Note that this can disable elimination unnecessarily when we have
2176 a non-local goto since it will look like a non-constant jump to
2177 someplace in the current function. This isn't a significant
2178 problem since such jumps will normally be when all elimination
2179 pairs are back to their initial offsets. */
2181 if (SET_DEST (x
) != pc_rtx
)
2184 switch (GET_CODE (SET_SRC (x
)))
2191 set_label_offsets (SET_SRC (x
), insn
, initial_p
);
2195 tem
= XEXP (SET_SRC (x
), 1);
2196 if (GET_CODE (tem
) == LABEL_REF
)
2197 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2198 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2201 tem
= XEXP (SET_SRC (x
), 2);
2202 if (GET_CODE (tem
) == LABEL_REF
)
2203 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2204 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2212 /* If we reach here, all eliminations must be at their initial
2213 offset because we are doing a jump to a variable address. */
2214 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2215 if (p
->offset
!= p
->initial_offset
)
2216 p
->can_eliminate
= 0;
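/* Editor's illustrative sketch, not part of the reload pass: set_label_offsets
   above records, for each label, the offset of every elimination pair at that
   label and disables an elimination whenever two paths reach the label with
   different offsets.  Stripped of the rtl bookkeeping, the per-pair merge
   amounts to the following; names and types are ours.  */

static void
example_merge_label_offset (int *recorded_offset, int *offset_known,
                            int *can_eliminate, int incoming_offset)
{
  if (!*offset_known)
    {
      /* First time we reach this label: just record the incoming offset.  */
      *recorded_offset = incoming_offset;
      *offset_known = 1;
    }
  else if (*recorded_offset != incoming_offset)
    /* Two ways of reaching the label disagree, so the elimination cannot
       be trusted any more.  */
    *can_eliminate = 0;
}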
2224 /* Scan X and replace any eliminable registers (such as fp) with a
2225 replacement (such as sp), plus an offset.
2227 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2228 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2229 MEM, we are allowed to replace a sum of a register and the constant zero
2230 with the register, which we cannot do outside a MEM. In addition, we need
2231 to record the fact that a register is referenced outside a MEM.
2233 If INSN is an insn, it is the insn containing X. If we replace a REG
2234 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2235 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2236 the REG is being modified.
2238 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2239 That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2244 replacements done assuming all offsets are at their initial values. If
2245 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2246 encounter, return the actual location so that find_reloads will do
2247 the proper thing. */
2250 eliminate_regs (rtx x
, enum machine_mode mem_mode
, rtx insn
)
2252 enum rtx_code code
= GET_CODE (x
);
2253 struct elim_table
*ep
;
2260 if (! current_function_decl
)
2282 /* First handle the case where we encounter a bare register that
2283 is eliminable. Replace it with a PLUS. */
2284 if (regno
< FIRST_PSEUDO_REGISTER
)
2286 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2288 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2289 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2292 else if (reg_renumber
&& reg_renumber
[regno
] < 0
2293 && reg_equiv_constant
&& reg_equiv_constant
[regno
]
2294 && ! CONSTANT_P (reg_equiv_constant
[regno
]))
2295 return eliminate_regs (copy_rtx (reg_equiv_constant
[regno
]),
2299 /* You might think handling MINUS in a manner similar to PLUS is a
2300 good idea. It is not. It has been tried multiple times and every
2301 time the change has had to have been reverted.
2303 Other parts of reload know a PLUS is special (gen_reload for example)
     and require special code to handle a reloaded PLUS operand.
2306 Also consider backends where the flags register is clobbered by a
2307 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2308 lea instruction comes to mind). If we try to reload a MINUS, we
2309 may kill the flags register that was holding a useful value.
2311 So, please before trying to handle MINUS, consider reload as a
2312 whole instead of this little section as well as the backend issues. */
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
2316 if (REG_P (XEXP (x
, 0))
2317 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2318 && CONSTANT_P (XEXP (x
, 1)))
2320 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2322 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2324 /* The only time we want to replace a PLUS with a REG (this
2325 occurs when the constant operand of the PLUS is the negative
2326 of the offset) is when we are inside a MEM. We won't want
2327 to do so at other times because that would change the
2328 structure of the insn in a way that reload can't handle.
2329 We special-case the commonest situation in
2330 eliminate_regs_in_insn, so just replace a PLUS with a
2331 PLUS here, unless inside a MEM. */
2332 if (mem_mode
!= 0 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2333 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2336 return gen_rtx_PLUS (Pmode
, ep
->to_rtx
,
2337 plus_constant (XEXP (x
, 1),
2338 ep
->previous_offset
));
2341 /* If the register is not eliminable, we are done since the other
2342 operand is a constant. */
2346 /* If this is part of an address, we want to bring any constant to the
2347 outermost PLUS. We will do this by doing register replacement in
2348 our operands and seeing if a constant shows up in one of them.
2350 Note that there is no risk of modifying the structure of the insn,
2351 since we only get called for its operands, thus we are either
2352 modifying the address inside a MEM, or something like an address
2353 operand of a load-address insn. */
2356 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2357 rtx new1
= eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2359 if (reg_renumber
&& (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1)))
2361 /* If one side is a PLUS and the other side is a pseudo that
2362 didn't get a hard register but has a reg_equiv_constant,
2363 we must replace the constant here since it may no longer
2364 be in the position of any operand. */
2365 if (GET_CODE (new0
) == PLUS
&& REG_P (new1
)
2366 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2367 && reg_renumber
[REGNO (new1
)] < 0
2368 && reg_equiv_constant
!= 0
2369 && reg_equiv_constant
[REGNO (new1
)] != 0)
2370 new1
= reg_equiv_constant
[REGNO (new1
)];
2371 else if (GET_CODE (new1
) == PLUS
&& REG_P (new0
)
2372 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2373 && reg_renumber
[REGNO (new0
)] < 0
2374 && reg_equiv_constant
[REGNO (new0
)] != 0)
2375 new0
= reg_equiv_constant
[REGNO (new0
)];
2377 new = form_sum (new0
, new1
);
2379 /* As above, if we are not inside a MEM we do not want to
2380 turn a PLUS into something else. We might try to do so here
2381 for an addition of 0 if we aren't optimizing. */
2382 if (! mem_mode
&& GET_CODE (new) != PLUS
)
2383 return gen_rtx_PLUS (GET_MODE (x
), new, const0_rtx
);
2391 /* If this is the product of an eliminable register and a
2392 constant, apply the distribute law and move the constant out
2393 so that we have (plus (mult ..) ..). This is needed in order
2394 to keep load-address insns valid. This case is pathological.
2395 We ignore the possibility of overflow here. */
2396 if (REG_P (XEXP (x
, 0))
2397 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2398 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2399 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2401 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2404 /* Refs inside notes don't count for this purpose. */
2405 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2406 || GET_CODE (insn
) == INSN_LIST
)))
2407 ep
->ref_outside_mem
= 1;
2410 plus_constant (gen_rtx_MULT (Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
2411 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
2414 /* ... fall through ... */
2418 /* See comments before PLUS about handling MINUS. */
2420 case DIV
: case UDIV
:
2421 case MOD
: case UMOD
:
2422 case AND
: case IOR
: case XOR
:
2423 case ROTATERT
: case ROTATE
:
2424 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2426 case GE
: case GT
: case GEU
: case GTU
:
2427 case LE
: case LT
: case LEU
: case LTU
:
2429 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2431 = XEXP (x
, 1) ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
) : 0;
2433 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2434 return gen_rtx_fmt_ee (code
, GET_MODE (x
), new0
, new1
);
2439 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2442 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2443 if (new != XEXP (x
, 0))
2445 /* If this is a REG_DEAD note, it is not valid anymore.
2446 Using the eliminated version could result in creating a
2447 REG_DEAD note for the stack or frame pointer. */
2448 if (GET_MODE (x
) == REG_DEAD
)
2450 ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
)
2453 x
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (x
), new, XEXP (x
, 1));
2457 /* ... fall through ... */
2460 /* Now do eliminations in the rest of the chain. If this was
2461 an EXPR_LIST, this might result in allocating more memory than is
2462 strictly needed, but it simplifies the code. */
2465 new = eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2466 if (new != XEXP (x
, 1))
2468 gen_rtx_fmt_ee (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new);
2476 case STRICT_LOW_PART
:
2478 case SIGN_EXTEND
: case ZERO_EXTEND
:
2479 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2480 case FLOAT
: case FIX
:
2481 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2489 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2490 if (new != XEXP (x
, 0))
2491 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
2495 /* Similar to above processing, but preserve SUBREG_BYTE.
2496 Convert (subreg (mem)) to (mem) if not paradoxical.
2497 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2498 pseudo didn't get a hard reg, we must replace this with the
2499 eliminated version of the memory location because push_reload
2500 may do the replacement in certain circumstances. */
2501 if (REG_P (SUBREG_REG (x
))
2502 && (GET_MODE_SIZE (GET_MODE (x
))
2503 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2504 && reg_equiv_memory_loc
!= 0
2505 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2507 new = SUBREG_REG (x
);
2510 new = eliminate_regs (SUBREG_REG (x
), mem_mode
, insn
);
2512 if (new != SUBREG_REG (x
))
2514 int x_size
= GET_MODE_SIZE (GET_MODE (x
));
2515 int new_size
= GET_MODE_SIZE (GET_MODE (new));
2518 && ((x_size
< new_size
2519 #ifdef WORD_REGISTER_OPERATIONS
2520 /* On these machines, combine can create rtl of the form
2521 (set (subreg:m1 (reg:m2 R) 0) ...)
2522 where m1 < m2, and expects something interesting to
2523 happen to the entire word. Moreover, it will use the
2524 (reg:m2 R) later, expecting all bits to be preserved.
2525 So if the number of words is the same, preserve the
2526 subreg so that push_reload can see it. */
2527 && ! ((x_size
- 1) / UNITS_PER_WORD
2528 == (new_size
-1 ) / UNITS_PER_WORD
)
2531 || x_size
== new_size
)
2533 return adjust_address_nv (new, GET_MODE (x
), SUBREG_BYTE (x
));
2535 return gen_rtx_SUBREG (GET_MODE (x
), new, SUBREG_BYTE (x
));
2541 /* Our only special processing is to pass the mode of the MEM to our
2542 recursive call and copy the flags. While we are here, handle this
2543 case more efficiently. */
2545 replace_equiv_address_nv (x
,
2546 eliminate_regs (XEXP (x
, 0),
2547 GET_MODE (x
), insn
));
2550 /* Handle insn_list USE that a call to a pure function may generate. */
2551 new = eliminate_regs (XEXP (x
, 0), 0, insn
);
2552 if (new != XEXP (x
, 0))
2553 return gen_rtx_USE (GET_MODE (x
), new);
2565 /* Process each of our operands recursively. If any have changed, make a
2567 fmt
= GET_RTX_FORMAT (code
);
2568 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2572 new = eliminate_regs (XEXP (x
, i
), mem_mode
, insn
);
2573 if (new != XEXP (x
, i
) && ! copied
)
2575 rtx new_x
= rtx_alloc (code
);
2576 memcpy (new_x
, x
, RTX_SIZE (code
));
2582 else if (*fmt
== 'E')
2585 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2587 new = eliminate_regs (XVECEXP (x
, i
, j
), mem_mode
, insn
);
2588 if (new != XVECEXP (x
, i
, j
) && ! copied_vec
)
2590 rtvec new_v
= gen_rtvec_v (XVECLEN (x
, i
),
2594 rtx new_x
= rtx_alloc (code
);
2595 memcpy (new_x
, x
, RTX_SIZE (code
));
2599 XVEC (x
, i
) = new_v
;
2602 XVECEXP (x
, i
, j
) = new;
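/* Editor's illustrative sketch, not part of the reload pass: the central
   rewrite in eliminate_regs above turns a reference (plus FROM C) into
   (plus TO (C + offset)), and inside a MEM the sum may collapse to the bare
   replacement register when the constant exactly cancels the offset.
   Reduced to integer arithmetic, the transformation is just this; the name
   is ours.  */

static int
example_eliminate_plus_constant (int inside_mem, int constant, int offset,
                                 int *collapses_to_reg)
{
  /* Outside a MEM a PLUS must stay a PLUS so the insn keeps its shape;
     inside a MEM a zero result lets us use the register directly.  */
  *collapses_to_reg = inside_mem && constant == -offset;
  return constant + offset;
}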
2610 /* Scan rtx X for modifications of elimination target registers. Update
2611 the table of eliminables to reflect the changed state. MEM_MODE is
2612 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2615 elimination_effects (rtx x
, enum machine_mode mem_mode
)
2617 enum rtx_code code
= GET_CODE (x
);
2618 struct elim_table
*ep
;
2642 /* First handle the case where we encounter a bare register that
2643 is eliminable. Replace it with a PLUS. */
2644 if (regno
< FIRST_PSEUDO_REGISTER
)
2646 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2648 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2651 ep
->ref_outside_mem
= 1;
2656 else if (reg_renumber
[regno
] < 0 && reg_equiv_constant
2657 && reg_equiv_constant
[regno
]
2658 && ! function_invariant_p (reg_equiv_constant
[regno
]))
2659 elimination_effects (reg_equiv_constant
[regno
], mem_mode
);
2668 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2669 if (ep
->to_rtx
== XEXP (x
, 0))
2671 int size
= GET_MODE_SIZE (mem_mode
);
2673 /* If more bytes than MEM_MODE are pushed, account for them. */
2674 #ifdef PUSH_ROUNDING
2675 if (ep
->to_rtx
== stack_pointer_rtx
)
2676 size
= PUSH_ROUNDING (size
);
2678 if (code
== PRE_DEC
|| code
== POST_DEC
)
2680 else if (code
== PRE_INC
|| code
== POST_INC
)
2682 else if ((code
== PRE_MODIFY
|| code
== POST_MODIFY
)
2683 && GET_CODE (XEXP (x
, 1)) == PLUS
2684 && XEXP (x
, 0) == XEXP (XEXP (x
, 1), 0)
2685 && CONSTANT_P (XEXP (XEXP (x
, 1), 1)))
2686 ep
->offset
-= INTVAL (XEXP (XEXP (x
, 1), 1));
2689 /* These two aren't unary operators. */
2690 if (code
== POST_MODIFY
|| code
== PRE_MODIFY
)
2693 /* Fall through to generic unary operation case. */
2694 case STRICT_LOW_PART
:
2696 case SIGN_EXTEND
: case ZERO_EXTEND
:
2697 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2698 case FLOAT
: case FIX
:
2699 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2707 elimination_effects (XEXP (x
, 0), mem_mode
);
2711 if (REG_P (SUBREG_REG (x
))
2712 && (GET_MODE_SIZE (GET_MODE (x
))
2713 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2714 && reg_equiv_memory_loc
!= 0
2715 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2718 elimination_effects (SUBREG_REG (x
), mem_mode
);
      /* If using a register that is the source of an elimination we still
2723 think can be performed, note it cannot be performed since we don't
2724 know how this register is used. */
2725 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2726 if (ep
->from_rtx
== XEXP (x
, 0))
2727 ep
->can_eliminate
= 0;
2729 elimination_effects (XEXP (x
, 0), mem_mode
);
2733 /* If clobbering a register that is the replacement register for an
2734 elimination we still think can be performed, note that it cannot
2735 be performed. Otherwise, we need not be concerned about it. */
2736 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2737 if (ep
->to_rtx
== XEXP (x
, 0))
2738 ep
->can_eliminate
= 0;
2740 elimination_effects (XEXP (x
, 0), mem_mode
);
2744 /* Check for setting a register that we know about. */
2745 if (REG_P (SET_DEST (x
)))
2747 /* See if this is setting the replacement register for an
2750 If DEST is the hard frame pointer, we do nothing because we
2751 assume that all assignments to the frame pointer are for
2752 non-local gotos and are being done at a time when they are valid
2753 and do not disturb anything else. Some machines want to
2754 eliminate a fake argument pointer (or even a fake frame pointer)
2755 with either the real frame or the stack pointer. Assignments to
2756 the hard frame pointer must not prevent this elimination. */
2758 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2760 if (ep
->to_rtx
== SET_DEST (x
)
2761 && SET_DEST (x
) != hard_frame_pointer_rtx
)
2763 /* If it is being incremented, adjust the offset. Otherwise,
2764 this elimination can't be done. */
2765 rtx src
= SET_SRC (x
);
2767 if (GET_CODE (src
) == PLUS
2768 && XEXP (src
, 0) == SET_DEST (x
)
2769 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
2770 ep
->offset
-= INTVAL (XEXP (src
, 1));
2772 ep
->can_eliminate
= 0;
2776 elimination_effects (SET_DEST (x
), 0);
2777 elimination_effects (SET_SRC (x
), 0);
2781 /* Our only special processing is to pass the mode of the MEM to our
2783 elimination_effects (XEXP (x
, 0), GET_MODE (x
));
2790 fmt
= GET_RTX_FORMAT (code
);
2791 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2794 elimination_effects (XEXP (x
, i
), mem_mode
);
2795 else if (*fmt
== 'E')
2796 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2797 elimination_effects (XVECEXP (x
, i
, j
), mem_mode
);
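/* Editor's illustrative sketch, not part of the reload pass: when
   elimination_effects above sees an auto-increment addressing mode applied
   to a replacement register (typically the stack pointer), it adjusts the
   pair's offset by the size of the access so later references still resolve
   correctly.  The bookkeeping reduces to the helper below; pre- and
   post-variants behave alike for the offset, and the name is ours.  */

static int
example_adjust_offset_for_autoinc (int offset, int access_size,
                                   int is_decrement)
{
  /* A decrement (push) grows the frame, an increment (pop) shrinks it.  */
  return is_decrement ? offset + access_size : offset - access_size;
}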
2801 /* Descend through rtx X and verify that no references to eliminable registers
   remain.  If any do remain, mark the involved register as not
   eliminable.  */
2806 check_eliminable_occurrences (rtx x
)
2815 code
= GET_CODE (x
);
2817 if (code
== REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
2819 struct elim_table
*ep
;
2821 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2822 if (ep
->from_rtx
== x
)
2823 ep
->can_eliminate
= 0;
2827 fmt
= GET_RTX_FORMAT (code
);
2828 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2831 check_eliminable_occurrences (XEXP (x
, i
));
2832 else if (*fmt
== 'E')
2835 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2836 check_eliminable_occurrences (XVECEXP (x
, i
, j
));
2841 /* Scan INSN and eliminate all eliminable registers in it.
2843 If REPLACE is nonzero, do the replacement destructively. Also
   delete the insn as dead if it is setting an eliminable register.
2846 If REPLACE is zero, do all our allocations in reload_obstack.
2848 If no eliminations were done and this insn doesn't require any elimination
2849 processing (these are not identical conditions: it might be updating sp,
2850 but not referencing fp; this needs to be seen during reload_as_needed so
2851 that the offset between fp and sp can be taken into consideration), zero
2852 is returned. Otherwise, 1 is returned. */
2855 eliminate_regs_in_insn (rtx insn
, int replace
)
2857 int icode
= recog_memoized (insn
);
2858 rtx old_body
= PATTERN (insn
);
2859 int insn_is_asm
= asm_noperands (old_body
) >= 0;
2860 rtx old_set
= single_set (insn
);
2864 rtx substed_operand
[MAX_RECOG_OPERANDS
];
2865 rtx orig_operand
[MAX_RECOG_OPERANDS
];
2866 struct elim_table
*ep
;
2869 if (! insn_is_asm
&& icode
< 0)
2871 gcc_assert (GET_CODE (PATTERN (insn
)) == USE
2872 || GET_CODE (PATTERN (insn
)) == CLOBBER
2873 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
2874 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
2875 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
);
2879 if (old_set
!= 0 && REG_P (SET_DEST (old_set
))
2880 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
2882 /* Check for setting an eliminable register. */
2883 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2884 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
2886 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2887 /* If this is setting the frame pointer register to the
2888 hardware frame pointer register and this is an elimination
2889 that will be done (tested above), this insn is really
2890 adjusting the frame pointer downward to compensate for
2891 the adjustment done before a nonlocal goto. */
2892 if (ep
->from
== FRAME_POINTER_REGNUM
2893 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
2895 rtx base
= SET_SRC (old_set
);
2896 rtx base_insn
= insn
;
2897 HOST_WIDE_INT offset
= 0;
2899 while (base
!= ep
->to_rtx
)
2901 rtx prev_insn
, prev_set
;
2903 if (GET_CODE (base
) == PLUS
2904 && GET_CODE (XEXP (base
, 1)) == CONST_INT
)
2906 offset
+= INTVAL (XEXP (base
, 1));
2907 base
= XEXP (base
, 0);
2909 else if ((prev_insn
= prev_nonnote_insn (base_insn
)) != 0
2910 && (prev_set
= single_set (prev_insn
)) != 0
2911 && rtx_equal_p (SET_DEST (prev_set
), base
))
2913 base
= SET_SRC (prev_set
);
2914 base_insn
= prev_insn
;
2920 if (base
== ep
->to_rtx
)
2923 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
2925 new_body
= old_body
;
2928 new_body
= copy_insn (old_body
);
2929 if (REG_NOTES (insn
))
2930 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
2932 PATTERN (insn
) = new_body
;
2933 old_set
= single_set (insn
);
2935 /* First see if this insn remains valid when we
2936 make the change. If not, keep the INSN_CODE
2937 the same and let reload fit it up. */
2938 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
2939 validate_change (insn
, &SET_DEST (old_set
),
2941 if (! apply_change_group ())
2943 SET_SRC (old_set
) = src
;
2944 SET_DEST (old_set
) = ep
->to_rtx
;
2953 /* In this case this insn isn't serving a useful purpose. We
2954 will delete it in reload_as_needed once we know that this
2955 elimination is, in fact, being done.
2957 If REPLACE isn't set, we can't delete this insn, but needn't
2958 process it since it won't be used unless something changes. */
2961 delete_dead_insn (insn
);
2969 /* We allow one special case which happens to work on all machines we
2970 currently support: a single set with the source or a REG_EQUAL
2971 note being a PLUS of an eliminable register and a constant. */
2973 if (old_set
&& REG_P (SET_DEST (old_set
)))
2975 /* First see if the source is of the form (plus (reg) CST). */
2976 if (GET_CODE (SET_SRC (old_set
)) == PLUS
2977 && REG_P (XEXP (SET_SRC (old_set
), 0))
2978 && GET_CODE (XEXP (SET_SRC (old_set
), 1)) == CONST_INT
2979 && REGNO (XEXP (SET_SRC (old_set
), 0)) < FIRST_PSEUDO_REGISTER
)
2980 plus_src
= SET_SRC (old_set
);
2981 else if (REG_P (SET_SRC (old_set
)))
2983 /* Otherwise, see if we have a REG_EQUAL note of the form
2984 (plus (reg) CST). */
2986 for (links
= REG_NOTES (insn
); links
; links
= XEXP (links
, 1))
2988 if (REG_NOTE_KIND (links
) == REG_EQUAL
2989 && GET_CODE (XEXP (links
, 0)) == PLUS
2990 && REG_P (XEXP (XEXP (links
, 0), 0))
2991 && GET_CODE (XEXP (XEXP (links
, 0), 1)) == CONST_INT
2992 && REGNO (XEXP (XEXP (links
, 0), 0)) < FIRST_PSEUDO_REGISTER
)
2994 plus_src
= XEXP (links
, 0);
3002 rtx reg
= XEXP (plus_src
, 0);
3003 HOST_WIDE_INT offset
= INTVAL (XEXP (plus_src
, 1));
3005 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3006 if (ep
->from_rtx
== reg
&& ep
->can_eliminate
)
3008 offset
+= ep
->offset
;
3013 /* We assume here that if we need a PARALLEL with
3014 CLOBBERs for this assignment, we can do with the
3015 MATCH_SCRATCHes that add_clobbers allocates.
3016 There's not much we can do if that doesn't work. */
3017 PATTERN (insn
) = gen_rtx_SET (VOIDmode
,
3021 INSN_CODE (insn
) = recog (PATTERN (insn
), insn
, &num_clobbers
);
3024 rtvec vec
= rtvec_alloc (num_clobbers
+ 1);
3026 vec
->elem
[0] = PATTERN (insn
);
3027 PATTERN (insn
) = gen_rtx_PARALLEL (VOIDmode
, vec
);
3028 add_clobbers (PATTERN (insn
), INSN_CODE (insn
));
3030 gcc_assert (INSN_CODE (insn
) >= 0);
3032 /* If we have a nonzero offset, and the source is already
3033 a simple REG, the following transformation would
3034 increase the cost of the insn by replacing a simple REG
3035 with (plus (reg sp) CST). So try only when plus_src
3036 comes from old_set proper, not REG_NOTES. */
3037 else if (SET_SRC (old_set
) == plus_src
)
3039 new_body
= old_body
;
3042 new_body
= copy_insn (old_body
);
3043 if (REG_NOTES (insn
))
3044 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3046 PATTERN (insn
) = new_body
;
3047 old_set
= single_set (insn
);
3049 XEXP (SET_SRC (old_set
), 0) = ep
->to_rtx
;
3050 XEXP (SET_SRC (old_set
), 1) = GEN_INT (offset
);
3056 /* This can't have an effect on elimination offsets, so skip right
3062 /* Determine the effects of this insn on elimination offsets. */
3063 elimination_effects (old_body
, 0);
3065 /* Eliminate all eliminable registers occurring in operands that
3066 can be handled by reload. */
3067 extract_insn (insn
);
3068 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3070 orig_operand
[i
] = recog_data
.operand
[i
];
3071 substed_operand
[i
] = recog_data
.operand
[i
];
3073 /* For an asm statement, every operand is eliminable. */
3074 if (insn_is_asm
|| insn_data
[icode
].operand
[i
].eliminable
)
3076 /* Check for setting a register that we know about. */
3077 if (recog_data
.operand_type
[i
] != OP_IN
3078 && REG_P (orig_operand
[i
]))
3080 /* If we are assigning to a register that can be eliminated, it
3081 must be as part of a PARALLEL, since the code above handles
3082 single SETs. We must indicate that we can no longer
3083 eliminate this reg. */
3084 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3086 if (ep
->from_rtx
== orig_operand
[i
])
3087 ep
->can_eliminate
= 0;
3090 substed_operand
[i
] = eliminate_regs (recog_data
.operand
[i
], 0,
3091 replace
? insn
: NULL_RTX
);
3092 if (substed_operand
[i
] != orig_operand
[i
])
	      /* Terminate the search in check_eliminable_occurrences at
		 this point.  */
3096 *recog_data
.operand_loc
[i
] = 0;
3098 /* If an output operand changed from a REG to a MEM and INSN is an
3099 insn, write a CLOBBER insn. */
3100 if (recog_data
.operand_type
[i
] != OP_IN
3101 && REG_P (orig_operand
[i
])
3102 && MEM_P (substed_operand
[i
])
3104 emit_insn_after (gen_rtx_CLOBBER (VOIDmode
, orig_operand
[i
]),
3109 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3110 *recog_data
.dup_loc
[i
]
3111 = *recog_data
.operand_loc
[(int) recog_data
.dup_num
[i
]];
  /* If any eliminable registers remain, they aren't eliminable anymore.  */
3114 check_eliminable_occurrences (old_body
);
3116 /* Substitute the operands; the new values are in the substed_operand
3118 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3119 *recog_data
.operand_loc
[i
] = substed_operand
[i
];
3120 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3121 *recog_data
.dup_loc
[i
] = substed_operand
[(int) recog_data
.dup_num
[i
]];
3123 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3124 re-recognize the insn. We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
3127 If re-recognition fails, the old insn code number will still be used,
3128 and some register operands may have changed into PLUS expressions.
3129 These will be handled by find_reloads by loading them into a register
3134 /* If we aren't replacing things permanently and we changed something,
3135 make another copy to ensure that all the RTL is new. Otherwise
     things can go wrong if find_reloads swaps commutative operands
3137 and one is inside RTL that has been copied while the other is not. */
3138 new_body
= old_body
;
3141 new_body
= copy_insn (old_body
);
3142 if (REG_NOTES (insn
))
3143 REG_NOTES (insn
) = copy_insn_1 (REG_NOTES (insn
));
3145 PATTERN (insn
) = new_body
;
3147 /* If we had a move insn but now we don't, rerecognize it. This will
3148 cause spurious re-recognition if the old move had a PARALLEL since
3149 the new one still will, but we can't call single_set without
3150 having put NEW_BODY into the insn and the re-recognition won't
3151 hurt in this rare case. */
3152 /* ??? Why this huge if statement - why don't we just rerecognize the
3156 && ((REG_P (SET_SRC (old_set
))
3157 && (GET_CODE (new_body
) != SET
3158 || !REG_P (SET_SRC (new_body
))))
3159 /* If this was a load from or store to memory, compare
3160 the MEM in recog_data.operand to the one in the insn.
3161 If they are not equal, then rerecognize the insn. */
3163 && ((MEM_P (SET_SRC (old_set
))
3164 && SET_SRC (old_set
) != recog_data
.operand
[1])
3165 || (MEM_P (SET_DEST (old_set
))
3166 && SET_DEST (old_set
) != recog_data
.operand
[0])))
3167 /* If this was an add insn before, rerecognize. */
3168 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3170 int new_icode
= recog (PATTERN (insn
), insn
, 0);
3172 INSN_CODE (insn
) = icode
;
3176 /* Restore the old body. If there were any changes to it, we made a copy
3177 of it while the changes were still in place, so we'll correctly return
3178 a modified insn below. */
3181 /* Restore the old body. */
3182 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3183 *recog_data
.operand_loc
[i
] = orig_operand
[i
];
3184 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3185 *recog_data
.dup_loc
[i
] = orig_operand
[(int) recog_data
.dup_num
[i
]];
3188 /* Update all elimination pairs to reflect the status after the current
3189 insn. The changes we make were determined by the earlier call to
3190 elimination_effects.
3192 We also detect cases where register elimination cannot be done,
3193 namely, if a register would be both changed and referenced outside a MEM
3194 in the resulting insn since such an insn is often undefined and, even if
3195 not, we cannot know what meaning will be given to it. Note that it is
3196 valid to have a register used in an address in an insn that changes it
3197 (presumably with a pre- or post-increment or decrement).
3199 If anything changes, return nonzero. */
3201 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3203 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3204 ep
->can_eliminate
= 0;
3206 ep
->ref_outside_mem
= 0;
3208 if (ep
->previous_offset
!= ep
->offset
)
3213 /* If we changed something, perform elimination in REG_NOTES. This is
3214 needed even when REPLACE is zero because a REG_DEAD note might refer
3215 to a register that we eliminate and could cause a different number
3216 of spill registers to be needed in the final reload pass than in
3218 if (val
&& REG_NOTES (insn
) != 0)
3219 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
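/* Editor's illustrative sketch, not part of the reload pass: the single-set
   special case in eliminate_regs_in_insn above rewrites
   (set DEST (plus FROM C)) as (set DEST (plus TO C')) where C' folds in the
   elimination offset, and a C' of zero degenerates into a plain register
   copy.  The constant folding is simply this; names are ours.  */

static int
example_fold_elimination_offset (int src_constant, int elim_offset,
                                 int *becomes_simple_move)
{
  int folded = src_constant + elim_offset;

  /* With a zero folded constant the PLUS disappears entirely.  */
  *becomes_simple_move = (folded == 0);
  return folded;
}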
/* Loop through all elimination pairs.
   Recalculate the number not at initial offset.

   Compute the maximum offset (minimum offset if the stack does not
   grow downward) for each elimination pair.  */

static void
update_eliminable_offsets (void)
{
  struct elim_table *ep;

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;
    }
}
3244 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3245 replacement we currently believe is valid, mark it as not eliminable if X
3246 modifies DEST in any way other than by adding a constant integer to it.
3248 If DEST is the frame pointer, we do nothing because we assume that
3249 all assignments to the hard frame pointer are nonlocal gotos and are being
3250 done at a time when they are valid and do not disturb anything else.
3251 Some machines want to eliminate a fake argument pointer with either the
3252 frame or stack pointer. Assignments to the hard frame pointer must not
3253 prevent this elimination.
3255 Called via note_stores from reload before starting its passes to scan
3256 the insns of the function. */
3259 mark_not_eliminable (rtx dest
, rtx x
, void *data ATTRIBUTE_UNUSED
)
3263 /* A SUBREG of a hard register here is just changing its mode. We should
3264 not see a SUBREG of an eliminable hard register, but check just in
3266 if (GET_CODE (dest
) == SUBREG
)
3267 dest
= SUBREG_REG (dest
);
3269 if (dest
== hard_frame_pointer_rtx
)
3272 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3273 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3274 && (GET_CODE (x
) != SET
3275 || GET_CODE (SET_SRC (x
)) != PLUS
3276 || XEXP (SET_SRC (x
), 0) != dest
3277 || GET_CODE (XEXP (SET_SRC (x
), 1)) != CONST_INT
))
3279 reg_eliminate
[i
].can_eliminate_previous
3280 = reg_eliminate
[i
].can_eliminate
= 0;
/* Verify that the initial elimination offsets did not change since the
   last call to set_initial_elim_offsets.  This is used to catch cases
   where something illegal happened during reload_as_needed that could
   cause incorrect code to be generated if we did not check for it.  */

static void
verify_initial_elim_offsets (void)
{
  HOST_WIDE_INT t;

#ifdef ELIMINABLE_REGS
  struct elim_table *ep;

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
      gcc_assert (t == ep->initial_offset);
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (t);
  gcc_assert (t == reg_eliminate[0].initial_offset);
#endif
}
/* Reset all offsets on eliminable registers to their initial values.  */

static void
set_initial_elim_offsets (void)
{
  struct elim_table *ep = reg_eliminate;

#ifdef ELIMINABLE_REGS
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
      ep->previous_offset = ep->offset = ep->initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
  ep->previous_offset = ep->offset = ep->initial_offset;
#endif

  num_not_at_initial_offset = 0;
}
/* Initialize the known label offsets.
   Set a known offset for each forced label to be at the initial offset
   of each elimination.  We do this because we assume that all
   computed jumps occur from a location where each elimination is
   at its initial offset.
   For all other labels, show that we don't know the offsets.  */

static void
set_initial_label_offsets (void)
{
  rtx x;
  memset (offsets_known_at, 0, num_labels);

  for (x = forced_labels; x; x = XEXP (x, 1))
    if (XEXP (x, 0))
      set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
}
/* Set all elimination offsets to the known values for the code label given
   by INSN.  */

static void
set_offsets_for_label (rtx insn)
{
  unsigned int i;
  int label_nr = CODE_LABEL_NUMBER (insn);
  struct elim_table *ep;

  num_not_at_initial_offset = 0;
  for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
    {
      ep->offset = ep->previous_offset
		 = offsets_at[label_nr - first_label_num][i];
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;
    }
}
3368 /* See if anything that happened changes which eliminations are valid.
3369 For example, on the SPARC, whether or not the frame pointer can
3370 be eliminated can depend on what registers have been used. We need
3371 not check some conditions again (such as flag_omit_frame_pointer)
3372 since they can't have changed. */
3375 update_eliminables (HARD_REG_SET
*pset
)
3377 int previous_frame_pointer_needed
= frame_pointer_needed
;
3378 struct elim_table
*ep
;
3380 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3381 if ((ep
->from
== HARD_FRAME_POINTER_REGNUM
&& FRAME_POINTER_REQUIRED
)
3382 #ifdef ELIMINABLE_REGS
3383 || ! CAN_ELIMINATE (ep
->from
, ep
->to
)
3386 ep
->can_eliminate
= 0;
3388 /* Look for the case where we have discovered that we can't replace
3389 register A with register B and that means that we will now be
3390 trying to replace register A with register C. This means we can
3391 no longer replace register C with register B and we need to disable
3392 such an elimination, if it exists. This occurs often with A == ap,
3393 B == sp, and C == fp. */
3395 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3397 struct elim_table
*op
;
3400 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3402 /* Find the current elimination for ep->from, if there is a
3404 for (op
= reg_eliminate
;
3405 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3406 if (op
->from
== ep
->from
&& op
->can_eliminate
)
3412 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3414 for (op
= reg_eliminate
;
3415 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3416 if (op
->from
== new_to
&& op
->to
== ep
->to
)
3417 op
->can_eliminate
= 0;
3421 /* See if any registers that we thought we could eliminate the previous
3422 time are no longer eliminable. If so, something has changed and we
3423 must spill the register. Also, recompute the number of eliminable
3424 registers and see if the frame pointer is needed; it is if there is
3425 no elimination of the frame pointer that we can perform. */
3427 frame_pointer_needed
= 1;
3428 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3430 if (ep
->can_eliminate
&& ep
->from
== FRAME_POINTER_REGNUM
3431 && ep
->to
!= HARD_FRAME_POINTER_REGNUM
)
3432 frame_pointer_needed
= 0;
3434 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3436 ep
->can_eliminate_previous
= 0;
3437 SET_HARD_REG_BIT (*pset
, ep
->from
);
3442 /* If we didn't need a frame pointer last time, but we do now, spill
3443 the hard frame pointer. */
3444 if (frame_pointer_needed
&& ! previous_frame_pointer_needed
)
3445 SET_HARD_REG_BIT (*pset
, HARD_FRAME_POINTER_REGNUM
);
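/* Editor's illustrative sketch, not part of the reload pass: update_eliminables
   above decides that the frame pointer is still needed unless some surviving
   elimination maps the soft frame pointer to something other than the hard
   frame pointer.  Over parallel arrays describing the elimination pairs the
   test looks like this; names are ours.  */

static int
example_frame_pointer_still_needed (const int *can_eliminate,
                                    const int *from_is_frame_pointer,
                                    const int *to_is_hard_frame_pointer,
                                    int n_pairs)
{
  int i;

  for (i = 0; i < n_pairs; i++)
    if (can_eliminate[i]
        && from_is_frame_pointer[i]
        && ! to_is_hard_frame_pointer[i])
      return 0;
  return 1;
}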
3448 /* Initialize the table of registers to eliminate. */
3451 init_elim_table (void)
3453 struct elim_table
*ep
;
3454 #ifdef ELIMINABLE_REGS
3455 const struct elim_table_1
*ep1
;
3459 reg_eliminate
= xcalloc (sizeof (struct elim_table
), NUM_ELIMINABLE_REGS
);
3461 /* Does this function require a frame pointer? */
3463 frame_pointer_needed
= (! flag_omit_frame_pointer
3464 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3465 and restore sp for alloca. So we can't eliminate
3466 the frame pointer in that case. At some point,
3467 we should improve this by emitting the
3468 sp-adjusting insns for this case. */
3469 || (current_function_calls_alloca
3470 && EXIT_IGNORE_STACK
)
3471 || FRAME_POINTER_REQUIRED
);
3475 #ifdef ELIMINABLE_REGS
3476 for (ep
= reg_eliminate
, ep1
= reg_eliminate_1
;
3477 ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++, ep1
++)
3479 ep
->from
= ep1
->from
;
3481 ep
->can_eliminate
= ep
->can_eliminate_previous
3482 = (CAN_ELIMINATE (ep
->from
, ep
->to
)
3483 && ! (ep
->to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
));
3486 reg_eliminate
[0].from
= reg_eliminate_1
[0].from
;
3487 reg_eliminate
[0].to
= reg_eliminate_1
[0].to
;
3488 reg_eliminate
[0].can_eliminate
= reg_eliminate
[0].can_eliminate_previous
3489 = ! frame_pointer_needed
;
3492 /* Count the number of eliminable registers and build the FROM and TO
3493 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3494 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3495 We depend on this. */
3496 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3498 num_eliminable
+= ep
->can_eliminate
;
3499 ep
->from_rtx
= gen_rtx_REG (Pmode
, ep
->from
);
3500 ep
->to_rtx
= gen_rtx_REG (Pmode
, ep
->to
);
/* Kick all pseudos out of hard register REGNO.
   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In this case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static void
spill_hard_reg (unsigned int regno, int cant_eliminate)
{
  int i;

  if (cant_eliminate)
    {
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
      regs_ever_live[regno] = 1;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& (unsigned int) reg_renumber[i] <= regno
	&& ((unsigned int) reg_renumber[i]
	    + hard_regno_nregs[(unsigned int) reg_renumber[i]]
			      [PSEUDO_REGNO_MODE (i)]
	    > regno))
      SET_REGNO_REG_SET (&spilled_pseudos, i);
}
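/* Editor's illustrative sketch, not part of the reload pass: the loop in
   spill_hard_reg above evicts any pseudo whose block of assigned hard
   registers overlaps REGNO.  With the renumbering and mode lookups replaced
   by plain integers, the per-pseudo test is another interval check; the
   name is ours.  */

static int
example_pseudo_overlaps_hard_reg (int assigned_hard_reg, int pseudo_nregs,
                                  unsigned int regno)
{
  return assigned_hard_reg >= 0
         && (unsigned int) assigned_hard_reg <= regno
         && (unsigned int) assigned_hard_reg + pseudo_nregs > regno;
}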
/* After find_reload_regs has been run for all insns that need reloads,
3539 and/or spill_hard_regs was called, this function is used to actually
3540 spill pseudo registers and try to reallocate them. It also sets up the
3541 spill_regs array for use by choose_reload_regs. */
3544 finish_spills (int global
)
3546 struct insn_chain
*chain
;
3547 int something_changed
= 0;
3549 reg_set_iterator rsi
;
3551 /* Build the spill_regs array for the function. */
3552 /* If there are some registers still to eliminate and one of the spill regs
3553 wasn't ever used before, additional stack space may have to be
3554 allocated to store this register. Thus, we may have changed the offset
3555 between the stack and frame pointers, so mark that something has changed.
3557 One might think that we need only set VAL to 1 if this is a call-used
3558 register. However, the set of registers that must be saved by the
3559 prologue is not identical to the call-used set. For example, the
3560 register used by the call insn for the return PC is a call-used register,
3561 but must be saved by the prologue. */
3564 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3565 if (TEST_HARD_REG_BIT (used_spill_regs
, i
))
3567 spill_reg_order
[i
] = n_spills
;
3568 spill_regs
[n_spills
++] = i
;
3569 if (num_eliminable
&& ! regs_ever_live
[i
])
3570 something_changed
= 1;
3571 regs_ever_live
[i
] = 1;
3574 spill_reg_order
[i
] = -1;
3576 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
3578 /* Record the current hard register the pseudo is allocated to in
3579 pseudo_previous_regs so we avoid reallocating it to the same
3580 hard reg in a later pass. */
3581 gcc_assert (reg_renumber
[i
] >= 0);
3583 SET_HARD_REG_BIT (pseudo_previous_regs
[i
], reg_renumber
[i
]);
3584 /* Mark it as no longer having a hard register home. */
3585 reg_renumber
[i
] = -1;
3586 /* We will need to scan everything again. */
3587 something_changed
= 1;
3590 /* Retry global register allocation if possible. */
3593 memset (pseudo_forbidden_regs
, 0, max_regno
* sizeof (HARD_REG_SET
));
3594 /* For every insn that needs reloads, set the registers used as spill
3595 regs in pseudo_forbidden_regs for every pseudo live across the
3597 for (chain
= insns_need_reload
; chain
; chain
= chain
->next_need_reload
)
3599 EXECUTE_IF_SET_IN_REG_SET
3600 (&chain
->live_throughout
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
3602 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
3603 chain
->used_spill_regs
);
3605 EXECUTE_IF_SET_IN_REG_SET
3606 (&chain
->dead_or_set
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
3608 IOR_HARD_REG_SET (pseudo_forbidden_regs
[i
],
3609 chain
->used_spill_regs
);
3613 /* Retry allocating the spilled pseudos. For each reg, merge the
3614 various reg sets that indicate which hard regs can't be used,
3615 and call retry_global_alloc.
3616 We change spill_pseudos here to only contain pseudos that did not
3617 get a new hard register. */
3618 for (i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned)max_regno
; i
++)
3619 if (reg_old_renumber
[i
] != reg_renumber
[i
])
3621 HARD_REG_SET forbidden
;
3622 COPY_HARD_REG_SET (forbidden
, bad_spill_regs_global
);
3623 IOR_HARD_REG_SET (forbidden
, pseudo_forbidden_regs
[i
]);
3624 IOR_HARD_REG_SET (forbidden
, pseudo_previous_regs
[i
]);
3625 retry_global_alloc (i
, forbidden
);
3626 if (reg_renumber
[i
] >= 0)
3627 CLEAR_REGNO_REG_SET (&spilled_pseudos
, i
);
3631 /* Fix up the register information in the insn chain.
3632 This involves deleting those of the spilled pseudos which did not get
3633 a new hard register home from the live_{before,after} sets. */
3634 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
3636 HARD_REG_SET used_by_pseudos
;
3637 HARD_REG_SET used_by_pseudos2
;
3639 AND_COMPL_REG_SET (&chain
->live_throughout
, &spilled_pseudos
);
3640 AND_COMPL_REG_SET (&chain
->dead_or_set
, &spilled_pseudos
);
3642 /* Mark any unallocated hard regs as available for spills. That
3643 makes inheritance work somewhat better. */
3644 if (chain
->need_reload
)
3646 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, &chain
->live_throughout
);
3647 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, &chain
->dead_or_set
);
3648 IOR_HARD_REG_SET (used_by_pseudos
, used_by_pseudos2
);
3650 /* Save the old value for the sanity test below. */
3651 COPY_HARD_REG_SET (used_by_pseudos2
, chain
->used_spill_regs
);
3653 compute_use_by_pseudos (&used_by_pseudos
, &chain
->live_throughout
);
3654 compute_use_by_pseudos (&used_by_pseudos
, &chain
->dead_or_set
);
3655 COMPL_HARD_REG_SET (chain
->used_spill_regs
, used_by_pseudos
);
3656 AND_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs
);
3658 /* Make sure we only enlarge the set. */
3659 GO_IF_HARD_REG_SUBSET (used_by_pseudos2
, chain
->used_spill_regs
, ok
);
3665 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3666 for (i
= FIRST_PSEUDO_REGISTER
; i
< (unsigned)max_regno
; i
++)
3668 int regno
= reg_renumber
[i
];
3669 if (reg_old_renumber
[i
] == regno
)
3672 alter_reg (i
, reg_old_renumber
[i
]);
3673 reg_old_renumber
[i
] = regno
;
3677 fprintf (dump_file
, " Register %d now on stack.\n\n", i
);
3679 fprintf (dump_file
, " Register %d now in %d.\n\n",
3680 i
, reg_renumber
[i
]);
3684 return something_changed
;
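/* Editor's illustrative sketch, not part of the reload pass: before retrying
   global allocation for a spilled pseudo, finish_spills above merges three
   sets of hard registers the pseudo must now avoid.  With each set modelled
   as a simple bitmask rather than a HARD_REG_SET, the merge is a plain
   bitwise OR; names are ours.  */

static unsigned long
example_forbidden_regs_for_retry (unsigned long bad_spill_regs_mask,
                                  unsigned long forbidden_for_pseudo_mask,
                                  unsigned long previous_home_mask)
{
  /* The pseudo must avoid globally bad spill regs, regs used as spills in
     insns where it is live, and the hard reg it was just evicted from.  */
  return bad_spill_regs_mask | forbidden_for_pseudo_mask | previous_home_mask;
}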
/* Find all paradoxical subregs within X and update reg_max_ref_width.  */

static void
scan_paradoxical_subregs (rtx x)
{
  int i;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case CONST_VECTOR: /* shouldn't happen, but just in case.  */
      return;

    case SUBREG:
      if (REG_P (SUBREG_REG (x))
          && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
        reg_max_ref_width[REGNO (SUBREG_REG (x))]
          = GET_MODE_SIZE (GET_MODE (x));
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        scan_paradoxical_subregs (XEXP (x, i));
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            scan_paradoxical_subregs (XVECEXP (x, i, j));
        }
    }
}
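/* Illustrative note (added commentary, not from the original sources): a
   paradoxical subreg is wider than the register it wraps, e.g.
   (subreg:DI (reg:SI 123) 0) on a 32-bit target.  Seeing such a reference
   bumps reg_max_ref_width[123] from 4 to 8, so that if pseudo 123 ends up
   on the stack its slot is made wide enough for the widest reference.  */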
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if defined (AUTO_INC_DEC)
  int i;
#endif
  rtx x;

  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = xcalloc (max_regno, sizeof (rtx));
  reg_has_output_reload = xmalloc (max_regno);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  set_initial_elim_offsets ();

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx prev = 0;
      rtx insn = chain->insn;
      rtx old_next = NEXT_INSN (insn);

      /* If we pass a label, copy the offsets from the label information
         into the current offsets of each elimination.  */
      if (LABEL_P (insn))
        set_offsets_for_label (insn);

      else if (INSN_P (insn))
        {
          rtx oldpat = copy_rtx (PATTERN (insn));

          /* If this is a USE and CLOBBER of a MEM, ensure that any
             references to eliminable registers have been removed.  */

          if ((GET_CODE (PATTERN (insn)) == USE
               || GET_CODE (PATTERN (insn)) == CLOBBER)
              && MEM_P (XEXP (PATTERN (insn), 0)))
            XEXP (XEXP (PATTERN (insn), 0), 0)
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
                                GET_MODE (XEXP (PATTERN (insn), 0)),
                                NULL_RTX);

          /* If we need to do register elimination processing, do so.
             This might delete the insn, in which case we are done.  */
          if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
            {
              eliminate_regs_in_insn (insn, 1);
              if (NOTE_P (insn))
                {
                  update_eliminable_offsets ();
                  continue;
                }
            }

          /* If need_elim is nonzero but need_reload is zero, one might think
             that we could simply set n_reloads to 0.  However, find_reloads
             could have done some manipulation of the insn (such as swapping
             commutative operands), and these manipulations are lost during
             the first pass for every insn that needs register elimination.
             So the actions of find_reloads must be redone here.  */

          if (! chain->need_elim && ! chain->need_reload
              && ! chain->need_operand_change)
            n_reloads = 0;
          /* First find the pseudo regs that must be reloaded for this insn.
             This info is returned in the tables reload_... (see reload.h).
             Also modify the body of INSN by substituting RELOAD
             rtx's for those pseudo regs.  */
          else
            {
              memset (reg_has_output_reload, 0, max_regno);
              CLEAR_HARD_REG_SET (reg_is_output_reload);

              find_reloads (insn, 1, spill_indirect_levels, live_known,
                            spill_reg_order);
            }

          if (n_reloads > 0)
            {
              rtx next = NEXT_INSN (insn);
              rtx p;

              prev = PREV_INSN (insn);

              /* Now compute which reload regs to reload them into.  Perhaps
                 reusing reload regs from previous insns, or else output
                 load insns to reload them.  Maybe output store insns too.
                 Record the choices of reload reg in reload_reg_rtx.  */
              choose_reload_regs (chain);

              /* Merge any reloads that we didn't combine for fear of
                 increasing the number of spill registers needed but now
                 discover can be safely merged.  */
              if (SMALL_REGISTER_CLASSES)
                merge_assigned_reloads (insn);

              /* Generate the insns to reload operands into or out of
                 their reload regs.  */
              emit_reload_insns (chain);

              /* Substitute the chosen reload regs from reload_reg_rtx
                 into the insn's body (or perhaps into the bodies of other
                 load and store insn that we just made for reloading
                 and that we moved the structure into).  */
              subst_reloads (insn);

              /* If this was an ASM, make sure that all the reload insns
                 we have generated are valid.  If not, give an error
                 and delete them.  */

              if (asm_noperands (PATTERN (insn)) >= 0)
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
                  if (p != insn && INSN_P (p)
                      && GET_CODE (PATTERN (p)) != USE
                      && (recog_memoized (p) < 0
                          || (extract_insn (p), ! constrain_operands (1))))
                    {
                      error_for_asm (insn,
                                     "%<asm%> operand requires "
                                     "impossible reload");
                      delete_insn (p);
                    }
            }

          if (num_eliminable && chain->need_elim)
            update_eliminable_offsets ();

          /* Any previously reloaded spilled pseudo reg, stored in this insn,
             is no longer validly lying around to save a future reload.
             Note that this does not detect pseudos that were reloaded
             for this insn in order to be stored in
             (obeying register constraints).  That is correct; such reload
             registers ARE still valid.  */
          note_stores (oldpat, forget_old_reloads_1, NULL);

          /* There may have been CLOBBER insns placed after INSN.  So scan
             between INSN and NEXT and use them to forget old reloads.  */
          for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
            if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
              note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#ifdef AUTO_INC_DEC
          /* Likewise for regs altered by auto-increment in this insn.
             REG_INC notes have been changed by reloading:
             find_reloads_address_1 records substitutions for them,
             which have been performed by subst_reloads above.  */
          for (i = n_reloads - 1; i >= 0; i--)
            {
              rtx in_reg = rld[i].in_reg;
              if (in_reg)
                {
                  enum rtx_code code = GET_CODE (in_reg);
                  /* PRE_INC / PRE_DEC will have the reload register ending up
                     with the same value as the stack slot, but that doesn't
                     hold true for POST_INC / POST_DEC.  Either we have to
                     convert the memory access to a true POST_INC / POST_DEC,
                     or we can't use the reload register for inheritance.  */
                  if ((code == POST_INC || code == POST_DEC)
                      && TEST_HARD_REG_BIT (reg_reloaded_valid,
                                            REGNO (rld[i].reg_rtx))
                      /* Make sure it is the inc/dec pseudo, and not
                         some other (e.g. output operand) pseudo.  */
                      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
                          == REGNO (XEXP (in_reg, 0))))
                    {
                      rtx reload_reg = rld[i].reg_rtx;
                      enum machine_mode mode = GET_MODE (reload_reg);
                      int n = 0;
                      rtx p;

                      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
                        {
                          /* We really want to ignore REG_INC notes here, so
                             use PATTERN (p) as argument to reg_set_p .  */
                          if (reg_set_p (reload_reg, PATTERN (p)))
                            break;
                          n = count_occurrences (PATTERN (p), reload_reg, 0);
                          if (! n)
                            continue;
                          if (n == 1)
                            {
                              n = validate_replace_rtx (reload_reg,
                                                        gen_rtx_fmt_e (code,
                                                                       mode,
                                                                       reload_reg),
                                                        p);

                              /* We must also verify that the constraints
                                 are met after the replacement.  */
                              extract_insn (p);
                              if (n)
                                n = constrain_operands (1);
                              else
                                break;

                              /* If the constraints were not met, then
                                 undo the replacement.  */
                              if (!n)
                                {
                                  validate_replace_rtx (gen_rtx_fmt_e (code,
                                                                       mode,
                                                                       reload_reg),
                                                        reload_reg, p);
                                  break;
                                }
                            }
                          break;
                        }

                      if (n == 1)
                        {
                          REG_NOTES (p)
                            = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
                                                 REG_NOTES (p));
                          /* Mark this as having an output reload so that the
                             REG_INC processing code below won't invalidate
                             the reload for inheritance.  */
                          SET_HARD_REG_BIT (reg_is_output_reload,
                                            REGNO (reload_reg));
                          reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
                        }
                      else
                        forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
                                              NULL);
                    }
                  else if ((code == PRE_INC || code == PRE_DEC)
                           && TEST_HARD_REG_BIT (reg_reloaded_valid,
                                                 REGNO (rld[i].reg_rtx))
                           /* Make sure it is the inc/dec pseudo, and not
                              some other (e.g. output operand) pseudo.  */
                           && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
                               == REGNO (XEXP (in_reg, 0))))
                    {
                      SET_HARD_REG_BIT (reg_is_output_reload,
                                        REGNO (rld[i].reg_rtx));
                      reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
                    }
                }
            }

          /* If a pseudo that got a hard register is auto-incremented,
             we must purge records of copying it into pseudos without
             hard registers.  */
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
            if (REG_NOTE_KIND (x) == REG_INC)
              {
                /* See if this pseudo reg was reloaded in this insn.
                   If so, its last-reload info is still valid
                   because it is based on this insn's reload.  */
                for (i = 0; i < n_reloads; i++)
                  if (rld[i].out == XEXP (x, 0))
                    break;

                if (i == n_reloads)
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
              }
#endif
        }
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
        CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
         if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
        {
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
          AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
        }
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  free (reg_has_output_reload);
}
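/* Illustrative note (added commentary, not from the original sources): for a
   single insn the effect of the loop above is roughly this.  Suppose pseudo
   150 got no hard reg and insn I is (set (reg 150) (plus (reg 150)
   (const_int 4))).  find_reloads records an in-out reload of (reg 150),
   choose_reload_regs picks a spill reg for it, emit_reload_insns emits a
   load from 150's stack slot before I and a store back after it, and
   subst_reloads rewrites I to use the chosen hard register.  */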
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.  */

static void
forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno;
  unsigned int nr;

  /* note_stores does give us subregs of hard regs,
     subreg_regno_offset will abort if it is not a hard reg.  */
  while (GET_CODE (x) == SUBREG)
    {
      /* We ignore the subreg offset when calculating the regno,
         because we are using the entire underlying hard register
         below.  */
      x = SUBREG_REG (x);
    }

  if (!REG_P (x))
    return;

  regno = REGNO (x);

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      unsigned int i;

      nr = hard_regno_nregs[regno][GET_MODE (x)];
      /* Storing into a spilled-reg invalidates its contents.
         This can happen if a block-local pseudo is allocated to that reg
         and it wasn't spilled because this block's total need is 0.
         Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
        /* But don't do this if the reg actually serves as an output
           reload reg in the current instruction.  */
        if (n_reloads == 0
            || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
          {
            CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
            CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
            spill_reg_store[regno + i] = 0;
          }
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
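/* Illustrative note (added commentary, not from the original sources): these
   sets partition an insn's reload-register usage by purpose.  For a store
   like (set (mem (plus (reg A) (const_int 4))) (reg B)) where A and B are
   pseudos without hard regs, the address reload of the output operand would
   typically mark its hard reg in reload_reg_used_in_output_addr[0], while
   the reload of B marks reload_reg_used_in_input[1]; reload_reg_free_p below
   consults these sets to decide when two reloads may share one register.  */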
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are
   actually needed.  */

static void
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
                        enum machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
        {
        case RELOAD_OTHER:
          SET_HARD_REG_BIT (reload_reg_used, i);
          break;

        case RELOAD_FOR_INPUT_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
          break;

        case RELOAD_FOR_INPADDR_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTADDR_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
          break;

        case RELOAD_FOR_OPERAND_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
          break;

        case RELOAD_FOR_OPADDR_ADDR:
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
          break;

        case RELOAD_FOR_OTHER_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
          break;

        case RELOAD_FOR_INPUT:
          SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT:
          SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
          break;

        case RELOAD_FOR_INSN:
          SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
          break;
        }

      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
    }
}
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
                         enum reload_type type, enum machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;

    default:
      gcc_unreachable ();
    }

  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
        {
          if (rld[i].when_needed == type
              && (check_any || rld[i].opnum == opnum)
              && rld[i].reg_rtx)
            {
              unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
              unsigned int conflict_end
                = (conflict_start
                   + hard_regno_nregs[conflict_start][rld[i].mode]);

              /* If there is an overlap with the first to-be-freed register,
                 adjust the interval start.  */
              if (conflict_start <= start_regno && conflict_end > start_regno)
                start_regno = conflict_end;
              /* Otherwise, if there is a conflict with one of the other
                 to-be-freed registers, adjust the interval end.  */
              if (conflict_start > start_regno && conflict_start < end_regno)
                end_regno = conflict_start;
            }
        }
    }

  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
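/* Illustrative note (added commentary, not from the original sources): the
   interval trimming above is deliberately conservative.  Freeing regs 4..7
   while a remaining reload of the same type still holds 5..6 shrinks the
   freed interval to just reg 4; reg 7 stays marked in use even though it no
   longer conflicts, which is the "somewhat more than necessary" case the
   comment above describes.  */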
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
        return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
         operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
         for this operand or used as an input in an earlier
         one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
         operand or used as an output in this or a later operand.  Note
         that multiple output operands are emitted in reverse order, so
         the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
         for this operand or used as an output in this or a
         later operand.  Note that multiple output operands are
         emitted in reverse order, so the conflicting ones are
         those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
         outputs, or an operand address for this or an earlier output.
         Note that multiple output operands are emitted in reverse order,
         so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
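/* Illustrative note (added commentary, not from the original sources): as an
   example of the rules above, a RELOAD_FOR_OUTPUT_ADDRESS reload for operand
   2 refuses a register already used as RELOAD_FOR_OUTPUT for operands 0..2,
   because output reloads are emitted in reverse operand order and those
   stores are still pending when the address is computed; but it may share a
   register with an input reload, which has finished by then.  */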
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
         its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
         its value reaches if no subsequent part uses the same register.
         Just like the above function, don't try to do this with lots
         of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
         with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
         and the address of only subsequent inputs and we do not need
         to check for RELOAD_OTHER objects since they are known not to
         conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
         both input and input address and we do not check for
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
         would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
         we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
         only thing to check are later output addresses.
         Note that multiple output operands are emitted in reverse order,
         so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
              || r2_type == RELOAD_FOR_OPADDR_ADDR
              || r2_type == RELOAD_FOR_INPUT
              || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
                   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
                  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
              || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
              || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
                   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
                  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
              || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      gcc_unreachable ();
    }
}
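/* Illustrative note (added commentary, not from the original sources): under
   these rules a RELOAD_FOR_INPUT reload for operand 0 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 1 (r2_opnum > r1_opnum), but
   not with the address reload of operand 0 itself, which has completed by
   the time the input value is needed.  */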
/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
int reload_spill_index[MAX_RELOADS];
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
                             enum reload_type type, rtx value, rtx out,
                             int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
         RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
         respectively, to the time values for these, we get distinct time
         values.  To get distinct time values for each operand, we have to
         multiply opnum by at least three.  We round that up to four because
         multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
         executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
         <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
         is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
          && ((unsigned) regno - true_regnum (reg)
              <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
          && i != reloadnum)
        {
          rtx other_input = rld[i].in;

          /* If the other reload loads the same input value, that
             will not cause a conflict only if it's loading it into
             the same register.  */
          if (true_regnum (reg) != start_regno)
            other_input = NULL_RTX;
          if (! other_input || ! rtx_equal_p (other_input, value)
              || rld[i].out || out)
            {
              int time2;
              switch (rld[i].when_needed)
                {
                case RELOAD_FOR_OTHER_ADDRESS:
                  time2 = 0;
                  break;
                case RELOAD_FOR_INPADDR_ADDRESS:
                  /* find_reloads makes sure that a
                     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
                     by at most one - the first -
                     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
                     address reload is inherited, the address address reload
                     goes away, so we can ignore this conflict.  */
                  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
                      && ignore_address_reloads
                      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
                         Then the address address is still needed to store
                         back the new address.  */
                      && ! rld[reloadnum].out)
                    continue;
                  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
                     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
                     reloads go away.  */
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
                      && ignore_address_reloads
                      /* Unless we are reloading an auto_inc expression.  */
                      && ! rld[reloadnum].out)
                    continue;
                  time2 = rld[i].opnum * 4 + 2;
                  break;
                case RELOAD_FOR_INPUT_ADDRESS:
                  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
                      && ignore_address_reloads
                      && ! rld[reloadnum].out)
                    continue;
                  time2 = rld[i].opnum * 4 + 3;
                  break;
                case RELOAD_FOR_INPUT:
                  time2 = rld[i].opnum * 4 + 4;
                  check_earlyclobber = 1;
                  break;
                  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
                     == MAX_RECOG_OPERAND * 4  */
                case RELOAD_FOR_OPADDR_ADDR:
                  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
                      && ignore_address_reloads
                      && ! rld[reloadnum].out)
                    continue;
                  time2 = MAX_RECOG_OPERANDS * 4 + 1;
                  break;
                case RELOAD_FOR_OPERAND_ADDRESS:
                  time2 = MAX_RECOG_OPERANDS * 4 + 2;
                  check_earlyclobber = 1;
                  break;
                case RELOAD_FOR_INSN:
                  time2 = MAX_RECOG_OPERANDS * 4 + 3;
                  break;
                case RELOAD_FOR_OUTPUT:
                  /* All RELOAD_FOR_OUTPUT reloads become live just after the
                     instruction is executed.  */
                  time2 = MAX_RECOG_OPERANDS * 4 + 4;
                  break;
                  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
                     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
                     value.  */
                case RELOAD_FOR_OUTADDR_ADDRESS:
                  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
                      && ignore_address_reloads
                      && ! rld[reloadnum].out)
                    continue;
                  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
                  break;
                case RELOAD_FOR_OUTPUT_ADDRESS:
                  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
                  break;
                case RELOAD_OTHER:
                  /* If there is no conflict in the input part, handle this
                     like an output reload.  */
                  if (! rld[i].in || rtx_equal_p (other_input, value))
                    {
                      time2 = MAX_RECOG_OPERANDS * 4 + 4;
                      /* Earlyclobbered outputs must conflict with inputs.  */
                      if (earlyclobber_operand_p (rld[i].out))
                        time2 = MAX_RECOG_OPERANDS * 4 + 3;

                      break;
                    }
                  time2 = 1;
                  /* RELOAD_OTHER might be live beyond instruction execution,
                     but this is not obvious when we set time2 = 1.  So check
                     here if there might be a problem with the new reload
                     clobbering the register used by the RELOAD_OTHER.  */
                  if (out)
                    return 0;
                  break;
                default:
                  return 0;
                }
              if ((time1 >= time2
                   && (! rld[i].in || rld[i].out
                       || ! rtx_equal_p (other_input, value)))
                  || (out && rld[reloadnum].out_reg
                      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
                return 0;
            }
        }
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
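/* Illustrative note (added commentary, not from the original sources): with
   the scheme above, for operand 1 the three input-side reloads get the
   distinct times 1*4+2 = 6 (RELOAD_FOR_INPADDR_ADDRESS), 1*4+3 = 7
   (RELOAD_FOR_INPUT_ADDRESS) and 1*4+4 = 8 (RELOAD_FOR_INPUT), all below the
   MAX_RECOG_OPERANDS*4+N values used for operand-address, insn and output
   reloads, so lifetimes can be compared with a single integer test.  */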
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   may be used to load VALUE into it.

   MODE is the mode in which the register is used, this is needed to
   determine how many hard regs to test.

   Other read-only reloads with the same value do not conflict
   unless OUT is nonzero and these other reloads have to live while
   output reloads live.
   If OUT is CONST0_RTX, this is a special case: it means that the
   test should not be for using register REGNO as reload register, but
   for copying from register REGNO into the reload register.

   RELOADNUM is the number of the reload we want to load this value for;
   a reload does not conflict with itself.

   When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
   reloads that load an address for the very reload we are considering.

   The caller has to make sure that there is no conflict with the return
   register.  */

static int
free_for_value_p (int regno, enum machine_mode mode, int opnum,
                  enum reload_type type, rtx value, rtx out, int reloadnum,
                  int ignore_address_reloads)
{
  int nregs = hard_regno_nregs[regno][mode];
  while (nregs-- > 0)
    if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
                                       value, out, reloadnum,
                                       ignore_address_reloads))
      return 0;
  return 1;
}
/* Return nonzero if the rtx X is invariant over the current function.  */
/* ??? Actually, the places where we use this expect exactly what
 * is tested here, and not everything that is function invariant.  In
 * particular, the frame pointer and arg pointer are special cased;
 * pic_offset_table_rtx is not, and this will cause aborts when we
 * go to spill these things to memory.  */

static int
function_invariant_p (rtx x)
{
  if (CONSTANT_P (x))
    return 1;
  if (x == frame_pointer_rtx || x == arg_pointer_rtx)
    return 1;
  if (GET_CODE (x) == PLUS
      && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
      && CONSTANT_P (XEXP (x, 1)))
    return 1;
  return 0;
}
/* Determine whether the reload reg X overlaps any rtx'es used for
   overriding inheritance.  Return nonzero if so.  */

static int
conflicts_with_override (rtx x)
{
  int i;
  for (i = 0; i < n_reloads; i++)
    if (reload_override_in[i]
        && reg_overlap_mentioned_p (x, reload_override_in[i]))
      return 1;
  return 0;
}
/* Give an error message saying we failed to find a reload for INSN,
   and clear out reload R.  */
static void
failed_reload (rtx insn, int r)
{
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
                 "%<asm%> operand constraint incompatible with operand size");
  rld[r].in = 0;
  rld[r].out = 0;
  rld[r].reg_rtx = 0;
  rld[r].optional = 1;
  rld[r].secondary_p = 1;
}
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
   successful.  */
static int
set_reload_reg (int i, int r)
{
  int regno;
  rtx reg = spill_reg_rtx[i];

  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
    spill_reg_rtx[i] = reg
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);

  regno = true_regnum (reg);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
    {
      enum machine_mode test_mode = VOIDmode;
      if (rld[r].in)
        test_mode = GET_MODE (rld[r].in);
      /* If rld[r].in has VOIDmode, it means we will load it
         in whatever mode the reload reg has: to wit, rld[r].mode.
         We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
         to reload from or into have modes which are valid for this
         reload register.  Otherwise the reload insns would be invalid.  */
      if (! (rld[r].in != 0 && test_mode != VOIDmode
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
        if (! (rld[r].out != 0
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
          {
            /* The reg is OK.  */
            last_spill_reg = i;

            /* Mark as in use for this insn the reload regs we use
               for this.  */
            mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
                                    rld[r].when_needed, rld[r].mode);

            rld[r].reg_rtx = reg;
            reload_spill_index[r] = spill_regs[i];
            return 1;
          }
    }
  return 0;
}
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
                     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
         We advance it round-robin between insns to use all spill regs
         equally, so that inherited reloads have a chance
         of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
        {
          int class = (int) rld[r].class;
          int regnum;

          i++;
          if (i >= n_spills)
            i -= n_spills;
          regnum = spill_regs[i];

          if ((reload_reg_free_p (regnum, rld[r].opnum,
                                  rld[r].when_needed)
               || (rld[r].in
                   /* We check reload_reg_used to make sure we
                      don't clobber the return register.  */
                   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
                   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
                                        rld[r].when_needed, rld[r].in,
                                        rld[r].out, r, 1)))
              && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
              && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
              /* Look first for regs to share, then for unshared.  But
                 don't share regs used for inherited reloads; they are
                 the ones we want to preserve.  */
              && (pass
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
                                         regnum)
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
                                              regnum))))
            {
              int nr = hard_regno_nregs[regnum][rld[r].mode];
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
                 (on 68000) got us two FP regs.  If NR is 1,
                 we would reject both of them.  */
              if (force_group)
                nr = rld[r].nregs;

              /* If we need only one reg, we have already won.  */
              if (nr == 1)
                {
                  /* But reject a single reg if we demand a group.  */
                  if (force_group)
                    continue;
                  break;
                }
              /* Otherwise check that as many consecutive regs as we need
                 are available here.  */
              while (nr > 1)
                {
                  int regno = regnum + nr - 1;
                  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
                        && spill_reg_order[regno] >= 0
                        && reload_reg_free_p (regno, rld[r].opnum,
                                              rld[r].when_needed)))
                    break;
                  nr--;
                }
              if (nr == 1)
                break;
            }
        }

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
        break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
/* Initialize all the tables needed to allocate reload registers.
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
   is the array we use to restore the reg_rtx field for every reload.  */

static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
                              rld[i].when_needed, rld[i].mode);
}
5229 /* Assign hard reg targets for the pseudo-registers we must reload
5230 into hard regs for this insn.
5231 Also output the instructions to copy them in and out of the hard regs.
5233 For machines with register classes, we are responsible for
5234 finding a reload reg in the proper class. */
5237 choose_reload_regs (struct insn_chain
*chain
)
5239 rtx insn
= chain
->insn
;
5241 unsigned int max_group_size
= 1;
5242 enum reg_class group_class
= NO_REGS
;
5243 int pass
, win
, inheritance
;
5245 rtx save_reload_reg_rtx
[MAX_RELOADS
];
5247 /* In order to be certain of getting the registers we need,
5248 we must sort the reloads into order of increasing register class.
5249 Then our grabbing of reload registers will parallel the process
5250 that provided the reload registers.
5252 Also note whether any of the reloads wants a consecutive group of regs.
5253 If so, record the maximum size of the group desired and what
5254 register class contains all the groups needed by this insn. */
5256 for (j
= 0; j
< n_reloads
; j
++)
5258 reload_order
[j
] = j
;
5259 reload_spill_index
[j
] = -1;
5261 if (rld
[j
].nregs
> 1)
5263 max_group_size
= MAX (rld
[j
].nregs
, max_group_size
);
5265 = reg_class_superunion
[(int) rld
[j
].class][(int) group_class
];
5268 save_reload_reg_rtx
[j
] = rld
[j
].reg_rtx
;
5272 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
5274 /* If -O, try first with inheritance, then turning it off.
5275 If not -O, don't do inheritance.
5276 Using inheritance when not optimizing leads to paradoxes
5277 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5278 because one side of the comparison might be inherited. */
5280 for (inheritance
= optimize
> 0; inheritance
>= 0; inheritance
--)
5282 choose_reload_regs_init (chain
, save_reload_reg_rtx
);
5284 /* Process the reloads in order of preference just found.
5285 Beyond this point, subregs can be found in reload_reg_rtx.
5287 This used to look for an existing reloaded home for all of the
5288 reloads, and only then perform any new reloads. But that could lose
5289 if the reloads were done out of reg-class order because a later
5290 reload with a looser constraint might have an old home in a register
5291 needed by an earlier reload with a tighter constraint.
5293 To solve this, we make two passes over the reloads, in the order
5294 described above. In the first pass we try to inherit a reload
5295 from a previous insn. If there is a later reload that needs a
5296 class that is a proper subset of the class being processed, we must
5297 also allocate a spill register during the first pass.
5299 Then make a second pass over the reloads to allocate any reloads
5300 that haven't been given registers yet. */
5302 for (j
= 0; j
< n_reloads
; j
++)
5304 int r
= reload_order
[j
];
5305 rtx search_equiv
= NULL_RTX
;
5307 /* Ignore reloads that got marked inoperative. */
5308 if (rld
[r
].out
== 0 && rld
[r
].in
== 0
5309 && ! rld
[r
].secondary_p
)
5312 /* If find_reloads chose to use reload_in or reload_out as a reload
5313 register, we don't need to chose one. Otherwise, try even if it
5314 found one since we might save an insn if we find the value lying
5316 Try also when reload_in is a pseudo without a hard reg. */
5317 if (rld
[r
].in
!= 0 && rld
[r
].reg_rtx
!= 0
5318 && (rtx_equal_p (rld
[r
].in
, rld
[r
].reg_rtx
)
5319 || (rtx_equal_p (rld
[r
].out
, rld
[r
].reg_rtx
)
5320 && !MEM_P (rld
[r
].in
)
5321 && true_regnum (rld
[r
].in
) < FIRST_PSEUDO_REGISTER
)))
5324 #if 0 /* No longer needed for correct operation.
5325 It might give better code, or might not; worth an experiment? */
5326 /* If this is an optional reload, we can't inherit from earlier insns
5327 until we are sure that any non-optional reloads have been allocated.
5328 The following code takes advantage of the fact that optional reloads
5329 are at the end of reload_order. */
5330 if (rld
[r
].optional
!= 0)
5331 for (i
= 0; i
< j
; i
++)
5332 if ((rld
[reload_order
[i
]].out
!= 0
5333 || rld
[reload_order
[i
]].in
!= 0
5334 || rld
[reload_order
[i
]].secondary_p
)
5335 && ! rld
[reload_order
[i
]].optional
5336 && rld
[reload_order
[i
]].reg_rtx
== 0)
5337 allocate_reload_reg (chain
, reload_order
[i
], 0);
5340 /* First see if this pseudo is already available as reloaded
5341 for a previous insn. We cannot try to inherit for reloads
5342 that are smaller than the maximum number of registers needed
5343 for groups unless the register we would allocate cannot be used
5346 We could check here to see if this is a secondary reload for
5347 an object that is already in a register of the desired class.
5348 This would avoid the need for the secondary reload register.
5349 But this is complex because we can't easily determine what
5350 objects might want to be loaded via this reload. So let a
5351 register be allocated here. In `emit_reload_insns' we suppress
5352 one of the loads in the case described above. */
5358 enum machine_mode mode
= VOIDmode
;
5362 else if (REG_P (rld
[r
].in
))
5364 regno
= REGNO (rld
[r
].in
);
5365 mode
= GET_MODE (rld
[r
].in
);
5367 else if (REG_P (rld
[r
].in_reg
))
5369 regno
= REGNO (rld
[r
].in_reg
);
5370 mode
= GET_MODE (rld
[r
].in_reg
);
5372 else if (GET_CODE (rld
[r
].in_reg
) == SUBREG
5373 && REG_P (SUBREG_REG (rld
[r
].in_reg
)))
5375 byte
= SUBREG_BYTE (rld
[r
].in_reg
);
5376 regno
= REGNO (SUBREG_REG (rld
[r
].in_reg
));
5377 if (regno
< FIRST_PSEUDO_REGISTER
)
5378 regno
= subreg_regno (rld
[r
].in_reg
);
5379 mode
= GET_MODE (rld
[r
].in_reg
);
5382 else if ((GET_CODE (rld
[r
].in_reg
) == PRE_INC
5383 || GET_CODE (rld
[r
].in_reg
) == PRE_DEC
5384 || GET_CODE (rld
[r
].in_reg
) == POST_INC
5385 || GET_CODE (rld
[r
].in_reg
) == POST_DEC
)
5386 && REG_P (XEXP (rld
[r
].in_reg
, 0)))
5388 regno
= REGNO (XEXP (rld
[r
].in_reg
, 0));
5389 mode
= GET_MODE (XEXP (rld
[r
].in_reg
, 0));
5390 rld
[r
].out
= rld
[r
].in
;
5394 /* This won't work, since REGNO can be a pseudo reg number.
5395 Also, it takes much more hair to keep track of all the things
5396 that can invalidate an inherited reload of part of a pseudoreg. */
5397 else if (GET_CODE (rld
[r
].in
) == SUBREG
5398 && REG_P (SUBREG_REG (rld
[r
].in
)))
5399 regno
= subreg_regno (rld
[r
].in
);
5402 if (regno
>= 0 && reg_last_reload_reg
[regno
] != 0)
5404 enum reg_class
class = rld
[r
].class, last_class
;
5405 rtx last_reg
= reg_last_reload_reg
[regno
];
5406 enum machine_mode need_mode
;
5408 i
= REGNO (last_reg
);
5409 i
+= subreg_regno_offset (i
, GET_MODE (last_reg
), byte
, mode
);
5410 last_class
= REGNO_REG_CLASS (i
);
5416 = smallest_mode_for_size (GET_MODE_SIZE (mode
) + byte
,
5417 GET_MODE_CLASS (mode
));
5420 #ifdef CANNOT_CHANGE_MODE_CLASS
5421 (!REG_CANNOT_CHANGE_MODE_P (i
, GET_MODE (last_reg
),
5425 (GET_MODE_SIZE (GET_MODE (last_reg
))
5426 >= GET_MODE_SIZE (need_mode
))
5427 #ifdef CANNOT_CHANGE_MODE_CLASS
5430 && reg_reloaded_contents
[i
] == regno
5431 && TEST_HARD_REG_BIT (reg_reloaded_valid
, i
)
5432 && HARD_REGNO_MODE_OK (i
, rld
[r
].mode
)
5433 && (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], i
)
5434 /* Even if we can't use this register as a reload
5435 register, we might use it for reload_override_in,
5436 if copying it to the desired class is cheap
5438 || ((REGISTER_MOVE_COST (mode
, last_class
, class)
5439 < MEMORY_MOVE_COST (mode
, class, 1))
5440 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5441 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode
,
5445 #ifdef SECONDARY_MEMORY_NEEDED
5446 && ! SECONDARY_MEMORY_NEEDED (last_class
, class,
5451 && (rld
[r
].nregs
== max_group_size
5452 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) group_class
],
5454 && free_for_value_p (i
, rld
[r
].mode
, rld
[r
].opnum
,
5455 rld
[r
].when_needed
, rld
[r
].in
,
5458 /* If a group is needed, verify that all the subsequent
5459 registers still have their values intact. */
5460 int nr
= hard_regno_nregs
[i
][rld
[r
].mode
];
5463 for (k
= 1; k
< nr
; k
++)
5464 if (reg_reloaded_contents
[i
+ k
] != regno
5465 || ! TEST_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
))
5473 last_reg
= (GET_MODE (last_reg
) == mode
5474 ? last_reg
: gen_rtx_REG (mode
, i
));
5477 for (k
= 0; k
< nr
; k
++)
5478 bad_for_class
|= ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[r
].class],
5481 /* We found a register that contains the
5482 value we need. If this register is the
5483 same as an `earlyclobber' operand of the
5484 current insn, just mark it as a place to
5485 reload from since we can't use it as the
5486 reload register itself. */
		      for (i1 = 0; i1 < n_earlyclobbers; i1++)
			if (reg_overlap_mentioned_for_reload_p
			    (reg_last_reload_reg[regno],
			     reload_earlyclobbers[i1]))
			  break;

		      if (i1 != n_earlyclobbers
			  || ! (free_for_value_p (i, rld[r].mode,
						  rld[r].when_needed, rld[r].in,
			  /* Don't use it if we'd clobber a pseudo reg.  */
			  || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
			      && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			  /* Don't clobber the frame pointer.  */
			  || (i == HARD_FRAME_POINTER_REGNUM
			      && frame_pointer_needed
			  /* Don't really use the inherited spill reg
			     if we need it wider than we've got it.  */
			  || (GET_MODE_SIZE (rld[r].mode)
			      > GET_MODE_SIZE (mode))
			  /* If find_reloads chose reload_out as reload
			     register, stay with it - that leaves the
			     inherited register for subsequent reloads.  */
			  || (rld[r].out && rld[r].reg_rtx
			      && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
			{
			  if (! rld[r].optional)
			    {
			      reload_override_in[r] = last_reg;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			    }
			}
		      else
			{
			  /* We can use this as a reload reg.  */
			  /* Mark the register as in use for this part of
			     the insn.  */
			  mark_reload_reg_in_use (i,
						  rld[r].opnum,
						  rld[r].when_needed,
						  rld[r].mode);
			  rld[r].reg_rtx = last_reg;
			  reload_inherited[r] = 1;
			  reload_inheritance_insn[r]
			    = reg_reloaded_insn[i];
			  reload_spill_index[r] = i;
			  for (k = 0; k < nr; k++)
			    SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					      i + k);
			}
	  /* Here's another way to see if the value is already lying around.  */
	      && ! reload_inherited[r]
	      && (CONSTANT_P (rld[r].in)
		  || GET_CODE (rld[r].in) == PLUS
		  || REG_P (rld[r].in)
		  || MEM_P (rld[r].in))
	      && (rld[r].nregs == max_group_size
		  || ! reg_classes_intersect_p (rld[r].class, group_class)))
	    search_equiv = rld[r].in;
	  /* If this is an output reload from a simple move insn, look
	     if an equivalence for the input is available.  */
	  else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
	    {
	      rtx set = single_set (insn);

	      if (set
		  && rtx_equal_p (rld[r].out, SET_DEST (set))
		  && CONSTANT_P (SET_SRC (set)))
		search_equiv = SET_SRC (set);
	    }

	      equiv
		= find_equiv_reg (search_equiv, insn, rld[r].class,
				  -1, NULL, 0, rld[r].mode);
	      regno = REGNO (equiv);

	      /* This must be a SUBREG of a hard register.
		 Make a new REG since this might be used in an
		 address and not all machines support SUBREGs
		 there.  */
	      gcc_assert (GET_CODE (equiv) == SUBREG);
	      regno = subreg_regno (equiv);
	      equiv = gen_rtx_REG (rld[r].mode, regno);

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
		  int bad_for_class = 0;
		  int max_regno = regno + rld[r].nregs;

		  for (i = regno; i < max_regno; i++)
		    {
		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
						      i);
		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
							    i);
		    }

		       && ! free_for_value_p (regno, rld[r].mode,
					      rld[r].opnum, rld[r].when_needed,
					      rld[r].in, rld[r].out, r, 1))

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
		equiv = 0;
	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      for (i = 0; i < n_earlyclobbers; i++)
		if (reg_overlap_mentioned_for_reload_p (equiv,
							reload_earlyclobbers[i]))
		  {
		    if (! rld[r].optional)
		      reload_override_in[r] = equiv;
		    break;
		  }

	      /* If the equiv register we have found is explicitly clobbered
		 in the current insn, it depends on the reload type if we
		 can use it, use it for reload_override_in, or not at all.
		 In particular, we then can't use EQUIV for a
		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */

	      if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
		switch (rld[r].when_needed)
		  {
		  case RELOAD_FOR_OTHER_ADDRESS:
		  case RELOAD_FOR_INPADDR_ADDRESS:
		  case RELOAD_FOR_INPUT_ADDRESS:
		  case RELOAD_FOR_OPADDR_ADDR:
		    break;

		  case RELOAD_FOR_INPUT:
		  case RELOAD_FOR_OPERAND_ADDRESS:
		    if (! rld[r].optional)
		      reload_override_in[r] = equiv;
		    break;
		  }
	      else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
		switch (rld[r].when_needed)
		  {
		  case RELOAD_FOR_OTHER_ADDRESS:
		  case RELOAD_FOR_INPADDR_ADDRESS:
		  case RELOAD_FOR_INPUT_ADDRESS:
		  case RELOAD_FOR_OPADDR_ADDR:
		  case RELOAD_FOR_OPERAND_ADDRESS:
		  case RELOAD_FOR_INPUT:
		    break;

		  case RELOAD_OTHER:
		    if (! rld[r].optional)
		      reload_override_in[r] = equiv;
		    break;
		  }
	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0
		  && (regno != HARD_FRAME_POINTER_REGNUM
		      || !frame_pointer_needed))
		{
		  int nr = hard_regno_nregs[regno][rld[r].mode];

		  rld[r].reg_rtx = equiv;
		  reload_inherited[r] = 1;

		  /* If reg_reloaded_valid is not set for this register,
		     there might be a stale spill_reg_store lying around.
		     We must clear it, since otherwise emit_reload_insns
		     might delete the store.  */
		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
		    spill_reg_store[regno] = NULL_RTX;

		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		    {
		      i = spill_reg_order[regno + k];

		      mark_reload_reg_in_use (regno, rld[r].opnum,
					      rld[r].when_needed,
					      rld[r].mode);
		      SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					regno + k);
		    }
		}
	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
	    continue;

	  /* No longer needed for correct operation.  Might or might
	     not give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((rld[s].in == 0 && rld[s].out == 0
		   && ! rld[s].secondary_p)

	      if ((rld[s].class != rld[r].class
		   && reg_classes_intersect_p (rld[r].class,
		  || rld[s].nregs < rld[r].nregs)

	      allocate_reload_reg (chain, r, j == n_reloads - 1);
	    }
      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (rld[r].reg_rtx != 0 || rld[r].optional)
	    continue;

	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      /* Loop around and try without any inheritance.  */

      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      choose_reload_regs_init (chain, save_reload_reg_rtx);

      /* Some sanity tests to verify that the reloads found in the first
	 pass are identical to the ones we have now.  */
      gcc_assert (chain->n_reloads == n_reloads);

      for (i = 0; i < n_reloads; i++)
	{
	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
	    continue;

	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);

	  for (j = 0; j < n_spills; j++)
	    if (spill_regs[j] == chain->rld[i].regno)
	      if (! set_reload_reg (j, i))
		failed_reload (chain->insn, i);
	}
  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.
     Likewise for reloads where reload_override_in has been set.  */

  /* If doing expensive optimizations, do one preliminary pass that doesn't
     cancel any inheritance, but removes reloads that have been needed only
     for reloads that we know can be inherited.  */
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
    {
      for (j = 0; j < n_reloads; j++)
	{
	  int r = reload_order[j];

	  if (reload_inherited[r] && rld[r].reg_rtx)
	    check_reg = rld[r].reg_rtx;
	  else if (reload_override_in[r]
		   && (REG_P (reload_override_in[r])
		       || GET_CODE (reload_override_in[r]) == SUBREG))
	    check_reg = reload_override_in[r];

	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
				  rld[r].opnum, rld[r].when_needed, rld[r].in,
				  (reload_inherited[r]
				   ? rld[r].out : const0_rtx),
	    {
	      reload_inherited[r] = 0;
	      reload_override_in[r] = 0;
	    }

	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
	     reload_override_in, then we do not need its related
	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
	     likewise for other reload types.
	     We handle this by removing a reload when its only replacement
	     is mentioned in reload_in of the reload we are going to inherit.
	     A special case are auto_inc expressions; even if the input is
	     inherited, we still need the address for the output.  We can
	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
	     If we succeeded removing some reload and we are doing a preliminary
	     pass just to remove such reloads, make another pass, since the
	     removal of one reload might allow us to inherit another one.  */
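	  /* Illustrative sketch (not from the original sources): suppose the
	     RELOAD_FOR_INPUT reload of (mem:SI (plus:SI (reg fp) (const_int 4)))
	     is inherited from an earlier insn.  The RELOAD_FOR_INPUT_ADDRESS
	     reload that would have computed (plus fp 4) then has no remaining
	     use, and remove_address_replacements below discards it -- except
	     for auto_inc reloads (those with rld[r].out == rld[r].in), where
	     the address is still needed for the output side.  */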
	      && rld[r].out != rld[r].in
	      && remove_address_replacements (rld[r].in) && pass)
  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      rld[j].in = reload_override_in[j];

  /* If this reload won't be done because it has been canceled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx != 0
	&& ((rld[j].optional && ! reload_inherited[j])
	    || (rld[j].in == 0 && rld[j].out == 0
		&& ! rld[j].secondary_p)))
      {
	int regno = true_regnum (rld[j].reg_rtx);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, rld[j].opnum,
				   rld[j].when_needed, rld[j].mode);
	rld[j].reg_rtx = 0;
	reload_spill_index[j] = -1;
      }
  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */
      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
	  && rld[r].reg_rtx != 0)
	{
	  int nregno = REGNO (rld[r].out_reg);

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = hard_regno_nregs[nregno][rld[r].mode];

	  reg_has_output_reload[nregno + nr] = 1;

	  nr = hard_regno_nregs[i][rld[r].mode];
	  SET_HARD_REG_BIT (reg_is_output_reload, i + nr);

	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
		      || rld[r].when_needed == RELOAD_FOR_INSN);
	}
    }
/* Deallocate the reload register for reload R.  This is called from
   remove_address_replacements.  */

void
deallocate_reload_reg (int r)
{
  int regno;

  if (! rld[r].reg_rtx)
    return;

  regno = true_regnum (rld[r].reg_rtx);
  if (spill_reg_order[regno] >= 0)
    clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
			     rld[r].mode);
  reload_spill_index[r] = -1;
}
/* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
   reloads of the same item for fear that we might not have enough reload
   registers.  However, normally they will get the same reload register
   and hence actually need not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */
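/* Illustrative sketch (not from the original sources): consider an insn
   whose RTL is roughly

	(set (reg:SI 100) (plus:SI (reg:SI 200) (reg:SI 200)))

   where pseudo 200 did not get a hard register.  find_reloads may generate
   two separate RELOAD_FOR_INPUT reloads of pseudo 200, one per operand
   occurrence, and choose_reload_regs can assign both the same reload
   register.  The function below spots that situation and collapses the
   reloads into a single RELOAD_OTHER reload, so the value is loaded from
   its stack slot only once.  */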
static void
merge_assigned_reloads (rtx insn)
{
  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      int conflicting_input = 0;
      int max_input_address_opnum = -1;
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;

      if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
	  || rld[i].out != 0 || rld[i].reg_rtx == 0
	  || reg_set_p (rld[i].reg_rtx, insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */
5981 for (j
= 0; j
< n_reloads
; j
++)
5983 if (i
== j
|| rld
[j
].reg_rtx
== 0
5984 || ! reg_overlap_mentioned_p (rld
[j
].reg_rtx
,
5988 if (rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
5989 && rld
[j
].opnum
> max_input_address_opnum
)
5990 max_input_address_opnum
= rld
[j
].opnum
;
5992 /* If the reload regs aren't exactly the same (e.g, different modes)
5993 or if the values are different, we can't merge this reload.
5994 But if it is an input reload, we might still merge
5995 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
5997 if (! rtx_equal_p (rld
[i
].reg_rtx
, rld
[j
].reg_rtx
)
5998 || rld
[j
].out
!= 0 || rld
[j
].in
== 0
5999 || ! rtx_equal_p (rld
[i
].in
, rld
[j
].in
))
6001 if (rld
[j
].when_needed
!= RELOAD_FOR_INPUT
6002 || ((rld
[i
].when_needed
!= RELOAD_FOR_INPUT_ADDRESS
6003 || rld
[i
].opnum
> rld
[j
].opnum
)
6004 && rld
[i
].when_needed
!= RELOAD_FOR_OTHER_ADDRESS
))
6006 conflicting_input
= 1;
6007 if (min_conflicting_input_opnum
> rld
[j
].opnum
)
6008 min_conflicting_input_opnum
= rld
[j
].opnum
;
6012 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6013 we, in fact, found any matching reloads. */
6016 && max_input_address_opnum
<= min_conflicting_input_opnum
)
6018 for (j
= 0; j
< n_reloads
; j
++)
6019 if (i
!= j
&& rld
[j
].reg_rtx
!= 0
6020 && rtx_equal_p (rld
[i
].reg_rtx
, rld
[j
].reg_rtx
)
6021 && (! conflicting_input
6022 || rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
6023 || rld
[j
].when_needed
== RELOAD_FOR_OTHER_ADDRESS
))
6025 rld
[i
].when_needed
= RELOAD_OTHER
;
6027 reload_spill_index
[j
] = -1;
6028 transfer_replacements (i
, j
);
6031 /* If this is now RELOAD_OTHER, look for any reloads that load
6032 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6033 if they were for inputs, RELOAD_OTHER for outputs. Note that
6034 this test is equivalent to looking for reloads for this operand
6036 /* We must take special care when there are two or more reloads to
6037 be merged and a RELOAD_FOR_OUTPUT_ADDRESS reload that loads the
6038 same value or a part of it; we must not change its type if there
6039 is a conflicting input. */
6041 if (rld
[i
].when_needed
== RELOAD_OTHER
)
6042 for (j
= 0; j
< n_reloads
; j
++)
6044 && rld
[j
].when_needed
!= RELOAD_OTHER
6045 && rld
[j
].when_needed
!= RELOAD_FOR_OTHER_ADDRESS
6046 && (! conflicting_input
6047 || rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
6048 || rld
[j
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
)
6049 && reg_overlap_mentioned_for_reload_p (rld
[j
].in
,
6055 = ((rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
6056 || rld
[j
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
)
6057 ? RELOAD_FOR_OTHER_ADDRESS
: RELOAD_OTHER
);
6059 /* Check to see if we accidentally converted two reloads
6060 that use the same reload register with different inputs
6061 to the same type. If so, the resulting code won't work,
6064 for (k
= 0; k
< j
; k
++)
6065 gcc_assert (rld
[k
].in
== 0 || rld
[k
].reg_rtx
== 0
6066 || rld
[k
].when_needed
!= rld
[j
].when_needed
6067 || !rtx_equal_p (rld
[k
].reg_rtx
,
6069 || rtx_equal_p (rld
[k
].in
,
/* These arrays are filled by emit_reload_insns and its subroutines.  */
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
static rtx other_input_address_reload_insns = 0;
static rtx other_input_reload_insns = 0;
static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx output_reload_insns[MAX_RECOG_OPERANDS];
static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx operand_reload_insns = 0;
static rtx other_operand_reload_insns = 0;
static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  */
static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
static HARD_REG_SET reg_reloaded_died;
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.  */

static void
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx insn = chain->insn;
  rtx reloadreg = rl->reg_rtx;
  rtx oldequiv_reg = 0;
  enum machine_mode mode;

  /* Determine the mode to reload in.
     This is very tricky because we have three to choose from.
     There is the mode the insn operand wants (rl->inmode).
     There is the mode of the reload register RELOADREG.
     There is the intrinsic mode of the operand, which we could find
     by stripping some SUBREGs.
     It turns out that RELOADREG's mode is irrelevant:
     we can change that arbitrarily.

     Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
     then the reload reg may not support QImode moves, so use SImode.
     If foo is in memory due to spilling a pseudo reg, this is safe,
     because the QImode value is in the least significant part of a
     slot big enough for a SImode.  If foo is some other sort of
     memory reference, then it is impossible to reload this case,
     so previous passes had better make sure this never happens.

     Then consider a one-word union which has SImode and one of its
     members is a float, being fetched as (SUBREG:SF union:SI).
     We must fetch that as SFmode because we could be loading into
     a float-only register.  In this case OLD's mode is correct.

     Consider an immediate integer: it has VOIDmode.  Here we need
     to get a mode from something else.

     In some cases, there is a fourth mode, the operand's
     containing mode.  If the insn specifies a containing mode for
     this operand, it overrides all others.

     I am not sure whether the algorithm here is always right,
     but it does the right things in those cases.  */

  mode = GET_MODE (old);
  if (mode == VOIDmode)
    mode = rl->inmode;
6144 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6145 /* If we need a secondary register for this operation, see if
6146 the value is already in a register in that class. Don't
6147 do this if the secondary register will be used as a scratch
6150 if (rl
->secondary_in_reload
>= 0
6151 && rl
->secondary_in_icode
== CODE_FOR_nothing
6154 = find_equiv_reg (old
, insn
,
6155 rld
[rl
->secondary_in_reload
].class,
6159 /* If reloading from memory, see if there is a register
6160 that already holds the same value. If so, reload from there.
6161 We can pass 0 as the reload_reg_p argument because
6162 any other reload has either already been emitted,
6163 in which case find_equiv_reg will see the reload-insn,
6164 or has yet to be emitted, in which case it doesn't matter
6165 because we will use this equiv reg right away. */
6167 if (oldequiv
== 0 && optimize
6170 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6171 && reg_renumber
[REGNO (old
)] < 0)))
6172 oldequiv
= find_equiv_reg (old
, insn
, ALL_REGS
, -1, NULL
, 0, mode
);
6176 unsigned int regno
= true_regnum (oldequiv
);
6178 /* Don't use OLDEQUIV if any other reload changes it at an
6179 earlier stage of this insn or at this stage. */
6180 if (! free_for_value_p (regno
, rl
->mode
, rl
->opnum
, rl
->when_needed
,
6181 rl
->in
, const0_rtx
, j
, 0))
6184 /* If it is no cheaper to copy from OLDEQUIV into the
6185 reload register than it would be to move from memory,
6186 don't use it. Likewise, if we need a secondary register
6190 && (((enum reg_class
) REGNO_REG_CLASS (regno
) != rl
->class
6191 && (REGISTER_MOVE_COST (mode
, REGNO_REG_CLASS (regno
),
6193 >= MEMORY_MOVE_COST (mode
, rl
->class, 1)))
6194 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6195 || (SECONDARY_INPUT_RELOAD_CLASS (rl
->class,
6199 #ifdef SECONDARY_MEMORY_NEEDED
6200 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno
),
6208 /* delete_output_reload is only invoked properly if old contains
6209 the original pseudo register. Since this is replaced with a
6210 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6211 find the pseudo in RELOAD_IN_REG. */
6213 && reload_override_in
[j
]
6214 && REG_P (rl
->in_reg
))
6221 else if (REG_P (oldequiv
))
6222 oldequiv_reg
= oldequiv
;
6223 else if (GET_CODE (oldequiv
) == SUBREG
)
6224 oldequiv_reg
= SUBREG_REG (oldequiv
);
6226 /* If we are reloading from a register that was recently stored in
6227 with an output-reload, see if we can prove there was
6228 actually no need to store the old value in it. */
6230 if (optimize
&& REG_P (oldequiv
)
6231 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
6232 && spill_reg_store
[REGNO (oldequiv
)]
6234 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (oldequiv
)])
6235 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
6237 delete_output_reload (insn
, j
, REGNO (oldequiv
));
6239 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6240 then load RELOADREG from OLDEQUIV. Note that we cannot use
6241 gen_lowpart_common since it can do the wrong thing when
6242 RELOADREG has a multi-word mode. Note that RELOADREG
6243 must always be a REG here. */
6245 if (GET_MODE (reloadreg
) != mode
)
6246 reloadreg
= reload_adjust_reg_for_mode (reloadreg
, mode
);
6247 while (GET_CODE (oldequiv
) == SUBREG
&& GET_MODE (oldequiv
) != mode
)
6248 oldequiv
= SUBREG_REG (oldequiv
);
6249 if (GET_MODE (oldequiv
) != VOIDmode
6250 && mode
!= GET_MODE (oldequiv
))
6251 oldequiv
= gen_lowpart_SUBREG (mode
, oldequiv
);
  /* Switch to the right place to emit the reload insns.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    }

  push_to_sequence (*where);
);
6289 /* Auto-increment addresses must be reloaded in a special way. */
6290 if (rl
->out
&& ! rl
->out_reg
)
6292 /* We are not going to bother supporting the case where a
6293 incremented register can't be copied directly from
6294 OLDEQUIV since this seems highly unlikely. */
6295 gcc_assert (rl
->secondary_in_reload
< 0);
6297 if (reload_inherited
[j
])
6298 oldequiv
= reloadreg
;
6300 old
= XEXP (rl
->in_reg
, 0);
6302 if (optimize
&& REG_P (oldequiv
)
6303 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
6304 && spill_reg_store
[REGNO (oldequiv
)]
6306 && (dead_or_set_p (insn
,
6307 spill_reg_stored_to
[REGNO (oldequiv
)])
6308 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
6310 delete_output_reload (insn
, j
, REGNO (oldequiv
));
6312 /* Prevent normal processing of this reload. */
6314 /* Output a special code sequence for this case. */
6315 new_spill_reg_store
[REGNO (reloadreg
)]
6316 = inc_for_reload (reloadreg
, oldequiv
, rl
->out
,
6320 /* If we are reloading a pseudo-register that was set by the previous
6321 insn, see if we can get rid of that pseudo-register entirely
6322 by redirecting the previous insn into our reload register. */
6324 else if (optimize
&& REG_P (old
)
6325 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6326 && dead_or_set_p (insn
, old
)
6327 /* This is unsafe if some other reload
6328 uses the same reg first. */
6329 && ! conflicts_with_override (reloadreg
)
6330 && free_for_value_p (REGNO (reloadreg
), rl
->mode
, rl
->opnum
,
6331 rl
->when_needed
, old
, rl
->out
, j
, 0))
6333 rtx temp
= PREV_INSN (insn
);
6334 while (temp
&& NOTE_P (temp
))
6335 temp
= PREV_INSN (temp
);
6337 && NONJUMP_INSN_P (temp
)
6338 && GET_CODE (PATTERN (temp
)) == SET
6339 && SET_DEST (PATTERN (temp
)) == old
6340 /* Make sure we can access insn_operand_constraint. */
6341 && asm_noperands (PATTERN (temp
)) < 0
6342 /* This is unsafe if operand occurs more than once in current
6343 insn. Perhaps some occurrences aren't reloaded. */
6344 && count_occurrences (PATTERN (insn
), old
, 0) == 1)
6346 rtx old
= SET_DEST (PATTERN (temp
));
6347 /* Store into the reload register instead of the pseudo. */
6348 SET_DEST (PATTERN (temp
)) = reloadreg
;
6350 /* Verify that resulting insn is valid. */
6351 extract_insn (temp
);
6352 if (constrain_operands (1))
6354 /* If the previous insn is an output reload, the source is
6355 a reload register, and its spill_reg_store entry will
6356 contain the previous destination. This is now
6358 if (REG_P (SET_SRC (PATTERN (temp
)))
6359 && REGNO (SET_SRC (PATTERN (temp
))) < FIRST_PSEUDO_REGISTER
)
6361 spill_reg_store
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
6362 spill_reg_stored_to
[REGNO (SET_SRC (PATTERN (temp
)))] = 0;
6365 /* If these are the only uses of the pseudo reg,
6366 pretend for GDB it lives in the reload reg we used. */
6367 if (REG_N_DEATHS (REGNO (old
)) == 1
6368 && REG_N_SETS (REGNO (old
)) == 1)
6370 reg_renumber
[REGNO (old
)] = REGNO (rl
->reg_rtx
);
6371 alter_reg (REGNO (old
), -1);
6377 SET_DEST (PATTERN (temp
)) = old
;
6382 /* We can't do that, so output an insn to load RELOADREG. */
6384 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6385 /* If we have a secondary reload, pick up the secondary register
6386 and icode, if any. If OLDEQUIV and OLD are different or
6387 if this is an in-out reload, recompute whether or not we
6388 still need a secondary register and what the icode should
6389 be. If we still need a secondary register and the class or
6390 icode is different, go back to reloading from OLD if using
6391 OLDEQUIV means that we got the wrong type of register. We
6392 cannot have different class or icode due to an in-out reload
6393 because we don't make such reloads when both the input and
6394 output need secondary reload registers. */
6396 if (! special
&& rl
->secondary_in_reload
>= 0)
6398 rtx second_reload_reg
= 0;
6399 int secondary_reload
= rl
->secondary_in_reload
;
6400 rtx real_oldequiv
= oldequiv
;
6403 enum insn_code icode
;
6405 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6406 and similarly for OLD.
6407 See comments in get_secondary_reload in reload.c. */
6408 /* If it is a pseudo that cannot be replaced with its
6409 equivalent MEM, we must fall back to reload_in, which
6410 will have all the necessary substitutions registered.
6411 Likewise for a pseudo that can't be replaced with its
6412 equivalent constant.
6414 Take extra care for subregs of such pseudos. Note that
6415 we cannot use reg_equiv_mem in this case because it is
6416 not in the right mode. */
6419 if (GET_CODE (tmp
) == SUBREG
)
6420 tmp
= SUBREG_REG (tmp
);
6422 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
6423 && (reg_equiv_memory_loc
[REGNO (tmp
)] != 0
6424 || reg_equiv_constant
[REGNO (tmp
)] != 0))
6426 if (! reg_equiv_mem
[REGNO (tmp
)]
6427 || num_not_at_initial_offset
6428 || GET_CODE (oldequiv
) == SUBREG
)
6429 real_oldequiv
= rl
->in
;
6431 real_oldequiv
= reg_equiv_mem
[REGNO (tmp
)];
6435 if (GET_CODE (tmp
) == SUBREG
)
6436 tmp
= SUBREG_REG (tmp
);
6438 && REGNO (tmp
) >= FIRST_PSEUDO_REGISTER
6439 && (reg_equiv_memory_loc
[REGNO (tmp
)] != 0
6440 || reg_equiv_constant
[REGNO (tmp
)] != 0))
6442 if (! reg_equiv_mem
[REGNO (tmp
)]
6443 || num_not_at_initial_offset
6444 || GET_CODE (old
) == SUBREG
)
6447 real_old
= reg_equiv_mem
[REGNO (tmp
)];
6450 second_reload_reg
= rld
[secondary_reload
].reg_rtx
;
6451 icode
= rl
->secondary_in_icode
;
6453 if ((old
!= oldequiv
&& ! rtx_equal_p (old
, oldequiv
))
6454 || (rl
->in
!= 0 && rl
->out
!= 0))
6456 enum reg_class new_class
6457 = SECONDARY_INPUT_RELOAD_CLASS (rl
->class,
6458 mode
, real_oldequiv
);
6460 if (new_class
== NO_REGS
)
6461 second_reload_reg
= 0;
6464 enum insn_code new_icode
;
6465 enum machine_mode new_mode
;
6467 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) new_class
],
6468 REGNO (second_reload_reg
)))
6469 oldequiv
= old
, real_oldequiv
= real_old
;
6472 new_icode
= reload_in_optab
[(int) mode
];
6473 if (new_icode
!= CODE_FOR_nothing
6474 && ((insn_data
[(int) new_icode
].operand
[0].predicate
6475 && ! ((*insn_data
[(int) new_icode
].operand
[0].predicate
)
6477 || (insn_data
[(int) new_icode
].operand
[1].predicate
6478 && ! ((*insn_data
[(int) new_icode
].operand
[1].predicate
)
6479 (real_oldequiv
, mode
)))))
6480 new_icode
= CODE_FOR_nothing
;
6482 if (new_icode
== CODE_FOR_nothing
)
6485 new_mode
= insn_data
[(int) new_icode
].operand
[2].mode
;
6487 if (GET_MODE (second_reload_reg
) != new_mode
)
6489 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg
),
6491 oldequiv
= old
, real_oldequiv
= real_old
;
6494 = reload_adjust_reg_for_mode (second_reload_reg
,
6501 /* If we still need a secondary reload register, check
6502 to see if it is being used as a scratch or intermediate
6503 register and generate code appropriately. If we need
6504 a scratch register, use REAL_OLDEQUIV since the form of
6505 the insn may depend on the actual address if it is
6508 if (second_reload_reg
)
6510 if (icode
!= CODE_FOR_nothing
)
6512 emit_insn (GEN_FCN (icode
) (reloadreg
, real_oldequiv
,
6513 second_reload_reg
));
6518 /* See if we need a scratch register to load the
6519 intermediate register (a tertiary reload). */
6520 enum insn_code tertiary_icode
6521 = rld
[secondary_reload
].secondary_in_icode
;
6523 if (tertiary_icode
!= CODE_FOR_nothing
)
6525 rtx third_reload_reg
6526 = rld
[rld
[secondary_reload
].secondary_in_reload
].reg_rtx
;
6528 emit_insn ((GEN_FCN (tertiary_icode
)
6529 (second_reload_reg
, real_oldequiv
,
6530 third_reload_reg
)));
6533 gen_reload (second_reload_reg
, real_oldequiv
,
6537 oldequiv
= second_reload_reg
;
6543 if (! special
&& ! rtx_equal_p (reloadreg
, oldequiv
))
6545 rtx real_oldequiv
= oldequiv
;
6547 if ((REG_P (oldequiv
)
6548 && REGNO (oldequiv
) >= FIRST_PSEUDO_REGISTER
6549 && (reg_equiv_memory_loc
[REGNO (oldequiv
)] != 0
6550 || reg_equiv_constant
[REGNO (oldequiv
)] != 0))
6551 || (GET_CODE (oldequiv
) == SUBREG
6552 && REG_P (SUBREG_REG (oldequiv
))
6553 && (REGNO (SUBREG_REG (oldequiv
))
6554 >= FIRST_PSEUDO_REGISTER
)
6555 && ((reg_equiv_memory_loc
6556 [REGNO (SUBREG_REG (oldequiv
))] != 0)
6557 || (reg_equiv_constant
6558 [REGNO (SUBREG_REG (oldequiv
))] != 0)))
6559 || (CONSTANT_P (oldequiv
)
6560 && (PREFERRED_RELOAD_CLASS (oldequiv
,
6561 REGNO_REG_CLASS (REGNO (reloadreg
)))
6563 real_oldequiv
= rl
->in
;
6564 gen_reload (reloadreg
, real_oldequiv
, rl
->opnum
,
6568 if (flag_non_call_exceptions
)
6569 copy_eh_notes (insn
, get_insns ());
6571 /* End this sequence. */
6572 *where
= get_insns ();
6575 /* Update reload_override_in so that delete_address_reloads_1
6576 can see the actual register usage. */
6578 reload_override_in
[j
] = oldequiv
;
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  */

static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg = rl->reg_rtx;
  rtx insn = chain->insn;
  enum machine_mode mode = GET_MODE (old);

  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  /* Determine the mode to reload in.
     See comments above (for input reloading).  */

  if (mode == VOIDmode)
    {
      /* VOIDmode should never happen for an output.  */
      if (asm_noperands (PATTERN (insn)) < 0)
	/* It's the compiler's fault.  */
	fatal_insn ("VOIDmode on an output", insn);
      error_for_asm (insn, "output operand is constant in %<asm%>");
      /* Prevent crash--use something we know is valid.  */
      old = gen_rtx_REG (mode, REGNO (reloadreg));
    }

  if (GET_MODE (reloadreg) != mode)
    reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
6617 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6619 /* If we need two reload regs, set RELOADREG to the intermediate
6620 one, since it will be stored into OLD. We might need a secondary
6621 register only for an input reload, so check again here. */
6623 if (rl
->secondary_out_reload
>= 0)
6627 if (REG_P (old
) && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6628 && reg_equiv_mem
[REGNO (old
)] != 0)
6629 real_old
= reg_equiv_mem
[REGNO (old
)];
6631 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl
->class,
6635 rtx second_reloadreg
= reloadreg
;
6636 reloadreg
= rld
[rl
->secondary_out_reload
].reg_rtx
;
6638 /* See if RELOADREG is to be used as a scratch register
6639 or as an intermediate register. */
6640 if (rl
->secondary_out_icode
!= CODE_FOR_nothing
)
6642 emit_insn ((GEN_FCN (rl
->secondary_out_icode
)
6643 (real_old
, second_reloadreg
, reloadreg
)));
6648 /* See if we need both a scratch and intermediate reload
6651 int secondary_reload
= rl
->secondary_out_reload
;
6652 enum insn_code tertiary_icode
6653 = rld
[secondary_reload
].secondary_out_icode
;
6655 if (GET_MODE (reloadreg
) != mode
)
6656 reloadreg
= reload_adjust_reg_for_mode (reloadreg
, mode
);
6658 if (tertiary_icode
!= CODE_FOR_nothing
)
6661 = rld
[rld
[secondary_reload
].secondary_out_reload
].reg_rtx
;
6664 /* Copy primary reload reg to secondary reload reg.
6665 (Note that these have been swapped above, then
6666 secondary reload reg to OLD using our insn.) */
6668 /* If REAL_OLD is a paradoxical SUBREG, remove it
6669 and try to put the opposite SUBREG on
6671 if (GET_CODE (real_old
) == SUBREG
6672 && (GET_MODE_SIZE (GET_MODE (real_old
))
6673 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old
))))
6674 && 0 != (tem
= gen_lowpart_common
6675 (GET_MODE (SUBREG_REG (real_old
)),
6677 real_old
= SUBREG_REG (real_old
), reloadreg
= tem
;
6679 gen_reload (reloadreg
, second_reloadreg
,
6680 rl
->opnum
, rl
->when_needed
);
6681 emit_insn ((GEN_FCN (tertiary_icode
)
6682 (real_old
, reloadreg
, third_reloadreg
)));
6687 /* Copy between the reload regs here and then to
6690 gen_reload (reloadreg
, second_reloadreg
,
6691 rl
->opnum
, rl
->when_needed
);
6697 /* Output the last reload insn. */
6702 /* Don't output the last reload if OLD is not the dest of
6703 INSN and is in the src and is clobbered by INSN. */
6704 if (! flag_expensive_optimizations
6706 || !(set
= single_set (insn
))
6707 || rtx_equal_p (old
, SET_DEST (set
))
6708 || !reg_mentioned_p (old
, SET_SRC (set
))
6709 || !regno_clobbered_p (REGNO (old
), insn
, rl
->mode
, 0))
6710 gen_reload (old
, reloadreg
, rl
->opnum
,
6714 /* Look at all insns we emitted, just to be safe. */
6715 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
6718 rtx pat
= PATTERN (p
);
6720 /* If this output reload doesn't come from a spill reg,
6721 clear any memory of reloaded copies of the pseudo reg.
6722 If this output reload comes from a spill reg,
6723 reg_has_output_reload will make this do nothing. */
6724 note_stores (pat
, forget_old_reloads_1
, NULL
);
6726 if (reg_mentioned_p (rl
->reg_rtx
, pat
))
6728 rtx set
= single_set (insn
);
6729 if (reload_spill_index
[j
] < 0
6731 && SET_SRC (set
) == rl
->reg_rtx
)
6733 int src
= REGNO (SET_SRC (set
));
6735 reload_spill_index
[j
] = src
;
6736 SET_HARD_REG_BIT (reg_is_output_reload
, src
);
6737 if (find_regno_note (insn
, REG_DEAD
, src
))
6738 SET_HARD_REG_BIT (reg_reloaded_died
, src
);
6740 if (REGNO (rl
->reg_rtx
) < FIRST_PSEUDO_REGISTER
)
6742 int s
= rl
->secondary_out_reload
;
6743 set
= single_set (p
);
6744 /* If this reload copies only to the secondary reload
6745 register, the secondary reload does the actual
6747 if (s
>= 0 && set
== NULL_RTX
)
6748 /* We can't tell what function the secondary reload
6749 has and where the actual store to the pseudo is
6750 made; leave new_spill_reg_store alone. */
6753 && SET_SRC (set
) == rl
->reg_rtx
6754 && SET_DEST (set
) == rld
[s
].reg_rtx
)
6756 /* Usually the next instruction will be the
6757 secondary reload insn; if we can confirm
6758 that it is, setting new_spill_reg_store to
6759 that insn will allow an extra optimization. */
6760 rtx s_reg
= rld
[s
].reg_rtx
;
6761 rtx next
= NEXT_INSN (p
);
6762 rld
[s
].out
= rl
->out
;
6763 rld
[s
].out_reg
= rl
->out_reg
;
6764 set
= single_set (next
);
6765 if (set
&& SET_SRC (set
) == s_reg
6766 && ! new_spill_reg_store
[REGNO (s_reg
)])
6768 SET_HARD_REG_BIT (reg_is_output_reload
,
6770 new_spill_reg_store
[REGNO (s_reg
)] = next
;
6774 new_spill_reg_store
[REGNO (rl
->reg_rtx
)] = p
;
6779 if (rl
->when_needed
== RELOAD_OTHER
)
6781 emit_insn (other_output_reload_insns
[rl
->opnum
]);
6782 other_output_reload_insns
[rl
->opnum
] = get_insns ();
6785 output_reload_insns
[rl
->opnum
] = get_insns ();
6787 if (flag_non_call_exceptions
)
6788 copy_eh_notes (insn
, get_insns ());
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  */

static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx insn = chain->insn;
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (rl->reg_rtx, old)
      && rl->reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && REG_P (rl->reg_rtx)
      && spill_reg_store[REGNO (rl->reg_rtx)] != 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
	  >= FIRST_PSEUDO_REGISTER)
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn,
			 spill_reg_stored_to[REGNO (rl->reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (rl->reg_rtx));
}
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */

static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;

  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno);
    }

  if (old == 0
      || rl->reg_rtx == old
      || rl->reg_rtx == 0)
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = rl->reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
					   rl->reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (!JUMP_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
/* Reload number R reloads from or to a group of hard registers starting at
   register REGNO.  Return true if it can be treated for inheritance purposes
   like a group of reloads, each one reloading a single hard register.  The
   caller has already checked that the spill register and REGNO use the same
   number of registers to store the reload value.  */
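/* Illustrative sketch (not from the original sources): a DImode reload on a
   32-bit target occupies two consecutive hard registers.  If neither the
   spill register pair nor the pair starting at REGNO is restricted by
   CANNOT_CHANGE_MODE_CLASS, the DImode group can be inherited one word at a
   time, as if it were two independent word-sized reloads; otherwise the
   whole group has to be treated as a unit.  */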
static bool
inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
{
#ifdef CANNOT_CHANGE_MODE_CLASS
  return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
				     GET_MODE (rld[r].reg_rtx),
				     reg_raw_mode[reload_spill_index[r]])
	  && !REG_CANNOT_CHANGE_MODE_P (regno,
					GET_MODE (rld[r].reg_rtx),
					reg_raw_mode[regno]));
#else
  return true;
#endif
}
/* Output insns to reload values in and out of the chosen reload regs.  */

static void
emit_reload_insns (struct insn_chain *chain)
{
  rtx insn = chain->insn;
  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      if (rld[j].reg_rtx
	  && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
	new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;

      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }
  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */
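  /* Rough picture of the resulting insn stream (an illustrative sketch,
     not literal output of this pass):

	other_input_address_reload_insns
	other_input_reload_insns
	inpaddr / input_address / input reload insns, per operand
	other_operand_reload_insns
	operand_reload_insns
	INSN
	outaddr / output_address / output reload insns, then
	other_output_reload_insns, per operand

     The emit_insn_before / emit_insn_after calls below realize exactly
     this ordering.  */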
  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }
7026 /* For all the spill regs newly reloaded in this instruction,
7027 record what they were reloaded from, so subsequent instructions
7028 can inherit the reloads.
7030 Update spill_reg_store for the reloads of this insn.
7031 Copy the elements that were updated in the loop above. */
7033 for (j
= 0; j
< n_reloads
; j
++)
7035 int r
= reload_order
[j
];
7036 int i
= reload_spill_index
[r
];
7038 /* If this is a non-inherited input reload from a pseudo, we must
7039 clear any memory of a previous store to the same pseudo. Only do
7040 something if there will not be an output reload for the pseudo
7042 if (rld
[r
].in_reg
!= 0
7043 && ! (reload_inherited
[r
] || reload_override_in
[r
]))
7045 rtx reg
= rld
[r
].in_reg
;
7047 if (GET_CODE (reg
) == SUBREG
)
7048 reg
= SUBREG_REG (reg
);
7051 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
7052 && ! reg_has_output_reload
[REGNO (reg
)])
7054 int nregno
= REGNO (reg
);
7056 if (reg_last_reload_reg
[nregno
])
7058 int last_regno
= REGNO (reg_last_reload_reg
[nregno
]);
7060 if (reg_reloaded_contents
[last_regno
] == nregno
)
7061 spill_reg_store
[last_regno
] = 0;
7066 /* I is nonneg if this reload used a register.
7067 If rld[r].reg_rtx is 0, this is an optional reload
7068 that we opted to ignore. */
7070 if (i
>= 0 && rld
[r
].reg_rtx
!= 0)
7072 int nr
= hard_regno_nregs
[i
][GET_MODE (rld
[r
].reg_rtx
)];
7074 int part_reaches_end
= 0;
7075 int all_reaches_end
= 1;
7077 /* For a multi register reload, we need to check if all or part
7078 of the value lives to the end. */
7079 for (k
= 0; k
< nr
; k
++)
7081 if (reload_reg_reaches_end_p (i
+ k
, rld
[r
].opnum
,
7082 rld
[r
].when_needed
))
7083 part_reaches_end
= 1;
7085 all_reaches_end
= 0;
7088 /* Ignore reloads that don't reach the end of the insn in
7090 if (all_reaches_end
)
7092 /* First, clear out memory of what used to be in this spill reg.
7093 If consecutive registers are used, clear them all. */
7095 for (k
= 0; k
< nr
; k
++)
7097 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7098 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered
, i
+ k
);
7101 /* Maybe the spill reg contains a copy of reload_out. */
7103 && (REG_P (rld
[r
].out
)
7107 || REG_P (rld
[r
].out_reg
)))
7109 rtx out
= (REG_P (rld
[r
].out
)
7113 /* AUTO_INC */ : XEXP (rld
[r
].in_reg
, 0));
7114 int nregno
= REGNO (out
);
7115 int nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
7116 : hard_regno_nregs
[nregno
]
7117 [GET_MODE (rld
[r
].reg_rtx
)]);
7120 spill_reg_store
[i
] = new_spill_reg_store
[i
];
7121 spill_reg_stored_to
[i
] = out
;
7122 reg_last_reload_reg
[nregno
] = rld
[r
].reg_rtx
;
7124 piecemeal
= (nregno
< FIRST_PSEUDO_REGISTER
7126 && inherit_piecemeal_p (r
, nregno
));
7128 /* If NREGNO is a hard register, it may occupy more than
7129 one register. If it does, say what is in the
7130 rest of the registers assuming that both registers
7131 agree on how many words the object takes. If not,
7132 invalidate the subsequent registers. */
7134 if (nregno
< FIRST_PSEUDO_REGISTER
)
7135 for (k
= 1; k
< nnr
; k
++)
7136 reg_last_reload_reg
[nregno
+ k
]
7138 ? regno_reg_rtx
[REGNO (rld
[r
].reg_rtx
) + k
]
7141 /* Now do the inverse operation. */
7142 for (k
= 0; k
< nr
; k
++)
7144 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, i
+ k
);
7145 reg_reloaded_contents
[i
+ k
]
7146 = (nregno
>= FIRST_PSEUDO_REGISTER
|| !piecemeal
7149 reg_reloaded_insn
[i
+ k
] = insn
;
7150 SET_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7151 if (HARD_REGNO_CALL_PART_CLOBBERED (i
+ k
, GET_MODE (out
)))
7152 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
, i
+ k
);
7156 /* Maybe the spill reg contains a copy of reload_in. Only do
7157 something if there will not be an output reload for
7158 the register being reloaded. */
7159 else if (rld
[r
].out_reg
== 0
7161 && ((REG_P (rld
[r
].in
)
7162 && REGNO (rld
[r
].in
) >= FIRST_PSEUDO_REGISTER
7163 && ! reg_has_output_reload
[REGNO (rld
[r
].in
)])
7164 || (REG_P (rld
[r
].in_reg
)
7165 && ! reg_has_output_reload
[REGNO (rld
[r
].in_reg
)]))
7166 && ! reg_set_p (rld
[r
].reg_rtx
, PATTERN (insn
)))
7173 if (REG_P (rld
[r
].in
)
7174 && REGNO (rld
[r
].in
) >= FIRST_PSEUDO_REGISTER
)
7176 else if (REG_P (rld
[r
].in_reg
))
7179 in
= XEXP (rld
[r
].in_reg
, 0);
7180 nregno
= REGNO (in
);
7182 nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
7183 : hard_regno_nregs
[nregno
]
7184 [GET_MODE (rld
[r
].reg_rtx
)]);
7186 reg_last_reload_reg
[nregno
] = rld
[r
].reg_rtx
;
7188 piecemeal
= (nregno
< FIRST_PSEUDO_REGISTER
7190 && inherit_piecemeal_p (r
, nregno
));
7192 if (nregno
< FIRST_PSEUDO_REGISTER
)
7193 for (k
= 1; k
< nnr
; k
++)
7194 reg_last_reload_reg
[nregno
+ k
]
7196 ? regno_reg_rtx
[REGNO (rld
[r
].reg_rtx
) + k
]
7199 /* Unless we inherited this reload, show we haven't
7200 recently done a store.
7201 Previous stores of inherited auto_inc expressions
7202 also have to be discarded. */
7203 if (! reload_inherited
[r
]
7204 || (rld
[r
].out
&& ! rld
[r
].out_reg
))
7205 spill_reg_store
[i
] = 0;
7207 for (k
= 0; k
< nr
; k
++)
7209 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, i
+ k
);
7210 reg_reloaded_contents
[i
+ k
]
7211 = (nregno
>= FIRST_PSEUDO_REGISTER
|| !piecemeal
7214 reg_reloaded_insn
[i
+ k
] = insn
;
7215 SET_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7216 if (HARD_REGNO_CALL_PART_CLOBBERED (i
+ k
, GET_MODE (in
)))
7217 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
, i
+ k
);
7222 /* However, if part of the reload reaches the end, then we must
7223 invalidate the old info for the part that survives to the end. */
7224 else if (part_reaches_end
)
7226 for (k
= 0; k
< nr
; k
++)
7227 if (reload_reg_reaches_end_p (i
+ k
,
7229 rld
[r
].when_needed
))
7230 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7234 /* The following if-statement was #if 0'd in 1.34 (or before...).
7235 It's reenabled in 1.35 because supposedly nothing else
7236 deals with this problem. */
7238 /* If a register gets output-reloaded from a non-spill register,
7239 that invalidates any previous reloaded copy of it.
7240 But forget_old_reloads_1 won't get to see it, because
7241 it thinks only about the original insn. So invalidate it here. */
7242 if (i
< 0 && rld
[r
].out
!= 0
7243 && (REG_P (rld
[r
].out
)
7244 || (MEM_P (rld
[r
].out
)
7245 && REG_P (rld
[r
].out_reg
))))
7247 rtx out
= (REG_P (rld
[r
].out
)
7248 ? rld
[r
].out
: rld
[r
].out_reg
);
7249 int nregno
= REGNO (out
);
7250 if (nregno
>= FIRST_PSEUDO_REGISTER
)
7252 rtx src_reg
, store_insn
= NULL_RTX
;
7254 reg_last_reload_reg
[nregno
] = 0;
7256 /* If we can find a hard register that is stored, record
7257 the storing insn so that we may delete this insn with
7258 delete_output_reload. */
7259 src_reg
= rld
[r
].reg_rtx
;
7261 /* If this is an optional reload, try to find the source reg
7262 from an input reload. */
7265 rtx set
= single_set (insn
);
7266 if (set
&& SET_DEST (set
) == rld
[r
].out
)
7270 src_reg
= SET_SRC (set
);
7272 for (k
= 0; k
< n_reloads
; k
++)
7274 if (rld
[k
].in
== src_reg
)
7276 src_reg
= rld
[k
].reg_rtx
;
7283 store_insn
= new_spill_reg_store
[REGNO (src_reg
)];
7284 if (src_reg
&& REG_P (src_reg
)
7285 && REGNO (src_reg
) < FIRST_PSEUDO_REGISTER
)
7287 int src_regno
= REGNO (src_reg
);
7288 int nr
= hard_regno_nregs
[src_regno
][rld
[r
].mode
];
7289 /* The place where to find a death note varies with
7290 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
7291 necessarily checked exactly in the code that moves
7292 notes, so just check both locations. */
7293 rtx note
= find_regno_note (insn
, REG_DEAD
, src_regno
);
7294 if (! note
&& store_insn
)
7295 note
= find_regno_note (store_insn
, REG_DEAD
, src_regno
);
7298 spill_reg_store
[src_regno
+ nr
] = store_insn
;
7299 spill_reg_stored_to
[src_regno
+ nr
] = out
;
7300 reg_reloaded_contents
[src_regno
+ nr
] = nregno
;
7301 reg_reloaded_insn
[src_regno
+ nr
] = store_insn
;
7302 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, src_regno
+ nr
);
7303 SET_HARD_REG_BIT (reg_reloaded_valid
, src_regno
+ nr
);
7304 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno
+ nr
,
7305 GET_MODE (src_reg
)))
7306 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered
,
7308 SET_HARD_REG_BIT (reg_is_output_reload
, src_regno
+ nr
);
7310 SET_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
7312 CLEAR_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
7314 reg_last_reload_reg
[nregno
] = src_reg
;
7315 /* We have to set reg_has_output_reload here, or else
7316 forget_old_reloads_1 will clear reg_last_reload_reg
7318 reg_has_output_reload
[nregno
] = 1;
7323 int num_regs
= hard_regno_nregs
[nregno
][GET_MODE (rld
[r
].out
)];
7325 while (num_regs
-- > 0)
7326 reg_last_reload_reg
[nregno
+ num_regs
] = 0;
7330 IOR_HARD_REG_SET (reg_reloaded_dead
, reg_reloaded_died
);
7333 /* Emit code to perform a reload from IN (which may be a reload register) to
7334 OUT (which may also be a reload register). IN or OUT is from operand
7335 OPNUM with reload type TYPE.
7337 Returns first insn emitted. */
7340 gen_reload (rtx out
, rtx in
, int opnum
, enum reload_type type
)
7342 rtx last
= get_last_insn ();
7345 /* If IN is a paradoxical SUBREG, remove it and try to put the
7346 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex by the fact that reload will never
     process the insns we generate here (so we must ensure that they fit
     their constraints), and by the fact that parts of IN might be being
     reloaded separately and replaced with spill registers.  Because of this,
     we are, in some sense, just guessing the right approach here.  The one
     listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */
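  /* For instance, after frame pointer elimination an address reload may ask
     for something like (plus (reg sp) (const_int 24)) to be put into the
     reload register; the PLUS case below first tries a single add insn and
     only falls back to a two-insn sequence if the target rejects it.  */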
  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain_operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (1))
	    return insn;
	}

      delete_insns_since (last);
      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */
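      /* For instance, if IN is (plus (reg R1) (const_int 100)) and the add
	 pattern will not accept that combination directly, the code below
	 ends up emitting roughly
	     (set OUT (const_int 100))
	     (set OUT (plus OUT (reg R1)))
	 with the operand order chosen by the swap just below.  */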
      code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && ! ((*insn_data[code].operand[2].predicate)
		    (op1, insn_data[code].operand[2].mode))))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain_operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (1))
	    {
	      /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
	      return insn;
	    }
	}

      delete_insns_since (last);

      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
    }
#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((REG_P (in) || GET_CODE (in) == SUBREG)
	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
	   && (REG_P (out) || GET_CODE (out) == SUBREG)
	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
				       REGNO_REG_CLASS (reg_or_subregno (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We cannot just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we cannot assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.  */
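/* Typically the output reload stored a reload register back into the
   pseudo's home (a stack slot or another register).  If every remaining use
   of that pseudo in INSN was satisfied by inheriting the reload register
   itself, the store is dead; the checks below make sure the pseudo is not
   referenced in more places than inheritance accounts for.  */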
static void
delete_output_reload (rtx insn, int j, int last_reload_reg)
{
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx i1;
  rtx substed;

  /* It is possible that this reload has only been used to set another reload,
     which we eliminated earlier, deleting this instruction too.  */
  if (INSN_DELETED_P (output_reload_insn))
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc[REGNO (reg)];
  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    {
	      n_inherited++;
	      reg2 = rld[k].out_reg;
	      if (! reg2)
		continue;
	      while (GET_CODE (reg2) == SUBREG)
		reg2 = XEXP (reg2, 0);
	      if (rtx_equal_p (reg2, reg))
		n_inherited++;
	    }
	  else
	    return;
	}
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, 0,
							NULL_RTX), 0);
  if (n_occurrences > n_inherited)
    return;
  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (LABEL_P (i1) || JUMP_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }
  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: see if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2)
	      || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
      alter_reg (REGNO (reg), -1);
    }
  else
    {
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
/* We are going to delete DEAD_INSN.  Recursively delete loads of
   reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
   CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
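/* For example, DEAD_INSN may store through an address that was itself loaded
   into a reload register by an earlier insn; once DEAD_INSN is gone, that
   address load may be dead too, and deleting it can in turn expose further
   dead address reloads.  */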
static void
delete_address_reloads (rtx dead_insn, rtx current_insn)
{
  rtx set = single_set (dead_insn);
  rtx set2, dst, prev, next;

  if (set)
    {
      rtx dst = SET_DEST (set);
      if (MEM_P (dst))
	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
    }

  /* If we deleted the store from a reloaded post_{in,de}c expression,
     we can delete the matching adds.  */
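  /* For instance, a reload of (post_inc (reg A)) by 4 leaves a sequence
     along the lines of
	 (set (reg R) (plus (reg R) (const_int 4)))
	 (set (reg A) (reg R))			;; DEAD_INSN, the store
	 (set (reg R) (plus (reg R) (const_int -4)))
     so once the store is gone the two adds cancel; the checks below verify
     exactly this shape before deleting them.  */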
  prev = PREV_INSN (dead_insn);
  next = NEXT_INSN (dead_insn);
  if (! prev || ! next)
    return;
  set = single_set (next);
  set2 = single_set (prev);
  if (! set || ! set2
      || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
      || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
      || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
    return;
  dst = SET_DEST (set);
  if (! rtx_equal_p (dst, SET_DEST (set2))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
      || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
      || (INTVAL (XEXP (SET_SRC (set), 1))
	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
    return;

  delete_related_insns (prev);
  delete_related_insns (next);
}
/* Subfunction of delete_address_reloads: process registers found in X.  */

static void
delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  if (spill_reg_order[REGNO (x)] < 0)
    return;
  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Return the instruction that stores into RELOADREG.  */
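/* For example, for an SImode access through (post_inc (reg A)) the caller
   would typically pass an INC_AMOUNT of 4 (the size of the accessed mode in
   bytes on most byte-addressed targets); the sign is applied below for the
   decrement forms.  */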
static rtx
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  rtx store;
  rtx real_in = in == value ? XEXP (in, 0) : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = -inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));

	      return add_insn;
	    }
	}
      delete_insns_since (last);
    }

  /* If we couldn't do the increment directly, we must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return store;
}
#ifdef AUTO_INC_DEC
/* Add a REG_INC note to INSN for each auto-increment address found in X.  */

static void
add_auto_inc_notes (rtx insn, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == MEM && auto_inc_p (XEXP (x, 0)))
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
      return;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_auto_inc_notes (insn, XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
    }
}
#endif
/* Copy EH notes from an insn to its reloads.  */
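/* A reload insn emitted for an insn inside an exception region must carry
   the same REG_EH_REGION note if it can itself trap; otherwise the exception
   region information would no longer describe which insns may throw.  */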
static void
copy_eh_notes (rtx insn, rtx x)
{
  rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (eh_note)
    {
      for (; x != 0; x = NEXT_INSN (x))
	{
	  if (may_trap_p (PATTERN (x)))
	    REG_NOTES (x)
	      = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
				   REG_NOTES (x));
	}
    }
}
/* The reload pass sometimes emits instructions after an abnormal call that
   ends a basic block, even though it really wants them on the outgoing edge.
   Look for abnormal call edges, search backward for the call that actually
   ends the block, and repair the damage.

   Instructions that throw exceptions internally are handled similarly.  */
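/* For example, an output reload emitted after a call with an abnormal (EH or
   nonlocal-goto) edge would otherwise sit beyond the end of the call's block;
   below we move such insns onto the block's fallthru edge instead, and then
   rebuild sub-basic-block structure if new trapping insns were created.  */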
static void
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->flags & EDGE_ABNORMAL_CALL)
	    break;
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
	      == (EDGE_ABNORMAL | EDGE_EH))
	    break;
	}
      if (e && !CALL_P (BB_END (bb))
	  && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
	  rtx next;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      break;
	  /* Get past the new insns generated.  Allow notes, as the insns may
	     be already deleted.  */
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);
	  gcc_assert (CALL_P (insn) || can_throw_internal (insn));
	  BB_END (bb) = insn;
	  inserted = true;
	  insn = NEXT_INSN (insn);
	  while (insn && insn != stop)
	    {
	      next = NEXT_INSN (insn);
	      if (INSN_P (insn))
		{
		  delete_insn (insn);

		  /* Sometimes there's still the return value USE.
		     If it's placed after a trapping call (i.e. that
		     call is the last insn anyway), we have no fallthru
		     edge.  Simply delete this use and don't try to insert
		     on the non-existent edge.  */
		  if (GET_CODE (PATTERN (insn)) != USE)
		    {
		      /* We're not deleting it, we're moving it.  */
		      INSN_DELETED_P (insn) = 0;
		      PREV_INSN (insn) = NULL_RTX;
		      NEXT_INSN (insn) = NULL_RTX;

		      insert_insn_on_edge (insn, e);
		    }
		}
	      insn = next;
	    }
	}
    }

  /* We've possibly turned a single trapping insn into multiple ones.  */
  if (flag_non_call_exceptions)
    {
      sbitmap blocks;
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_ones (blocks);
      find_many_sub_basic_blocks (blocks);
    }

  if (inserted)
    commit_edge_insertions ();
}