1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "obstack.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "regs.h"
37 #include "basic-block.h"
38 #include "reload.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "real.h"
42 #include "toplev.h"
44 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
45 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
46 #endif
48 /* This file contains the reload pass of the compiler, which is
49 run after register allocation has been done. It checks that
50 each insn is valid (operands required to be in registers really
51 are in registers of the proper class) and fixes up invalid ones
52 by copying values temporarily into registers for the insns
53 that need them.
55 The results of register allocation are described by the vector
56 reg_renumber; the insns still contain pseudo regs, but reg_renumber
57 can be used to find which hard reg, if any, a pseudo reg is in.
59 The technique we always use is to free up a few hard regs that are
60 called ``reload regs'', and for each place where a pseudo reg
61 must be in a hard reg, copy it temporarily into one of the reload regs.
63 Reload regs are allocated locally for every instruction that needs
64 reloads. When there are pseudos which are allocated to a register that
65 has been chosen as a reload reg, such pseudos must be ``spilled''.
66 This means that they go to other hard regs, or to stack slots if no other
67 available hard regs can be found. Spilling can invalidate more
68 insns, requiring additional need for reloads, so we must keep checking
69 until the process stabilizes.
71 For machines with different classes of registers, we must keep track
72 of the register class needed for each reload, and make sure that
73 we allocate enough reload registers of each class.
75 The file reload.c contains the code that checks one insn for
76 validity and reports the reloads that it needs. This file
77 is in charge of scanning the entire rtl code, accumulating the
78 reload needs, spilling, assigning reload registers to use for
79 fixing up each insn, and generating the new insns to copy values
80 into the reload registers. */
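/* Editor's sketch: the fixed-point structure described above can be pictured
   with a toy model.  Everything below (toy_reload and its parameters) is a
   hypothetical illustration only, not part of this file's interface; it is
   guarded by #if 0 so it has no effect on the build.  */
#if 0
/* RENUMBER[p] is the hard reg holding pseudo p, or -1 if p lives on the
   stack.  INSN_PSEUDO[i] is the single pseudo that insn i must have in a
   hard register.  Pick spill regs until every needy insn can be served,
   evicting any pseudo that was allocated to a chosen spill reg; since
   spilling may invalidate further insns, rescan until a pass adds no new
   spill regs.  */
static int
toy_reload (int *renumber, int n_pseudos, const int *insn_pseudo, int n_insns)
{
  char is_spill_reg[8];		/* pretend machine with 8 hard regs */
  int n_spill = 0, passes = 0;
  int changed, i, r, p;

  for (r = 0; r < 8; r++)
    is_spill_reg[r] = 0;

  do
    {
      changed = 0;
      passes++;
      for (i = 0; i < n_insns; i++)
	/* One reload reg serves every insn here, because reload regs are
	   only live within a single insn.  */
	if (renumber[insn_pseudo[i]] < 0 && n_spill == 0)
	  {
	    /* Grab the first free hard reg as a reload reg.  */
	    for (r = 0; r < 8 && is_spill_reg[r]; r++)
	      ;
	    if (r == 8)
	      return -1;	/* could not find a spill reg */
	    is_spill_reg[r] = 1;
	    n_spill++;

	    /* Spill the pseudos that lived there; that may force more
	       reloads, hence the outer do-while.  */
	    for (p = 0; p < n_pseudos; p++)
	      if (renumber[p] == r)
		{
		  renumber[p] = -1;
		  changed = 1;
		}
	  }
    }
  while (changed);

  return passes;
}
#endif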
83 #ifndef REGISTER_MOVE_COST
84 #define REGISTER_MOVE_COST(x, y) 2
85 #endif
87 /* During reload_as_needed, element N contains a REG rtx for the hard reg
88 into which reg N has been reloaded (perhaps for a previous insn). */
89 static rtx *reg_last_reload_reg;
91 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
92 for an output reload that stores into reg N. */
93 static char *reg_has_output_reload;
95 /* Indicates which hard regs are reload-registers for an output reload
96 in the current insn. */
97 static HARD_REG_SET reg_is_output_reload;
99 /* Element N is the constant value to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a constant.
101 find_reloads looks at this in order to replace pseudo reg N
102 with the constant it stands for. */
103 rtx *reg_equiv_constant;
105 /* Element N is a memory location to which pseudo reg N is equivalent,
106 prior to any register elimination (such as frame pointer to stack
107 pointer). Depending on whether or not it is a valid address, this value
108 is transferred to either reg_equiv_address or reg_equiv_mem. */
109 rtx *reg_equiv_memory_loc;
111 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
112 This is used when the address is not valid as a memory address
113 (because its displacement is too big for the machine.) */
114 rtx *reg_equiv_address;
116 /* Element N is the memory slot to which pseudo reg N is equivalent,
117 or zero if pseudo reg N is not equivalent to a memory slot. */
118 rtx *reg_equiv_mem;
120 /* Widest width in which each pseudo reg is referred to (via subreg). */
121 static int *reg_max_ref_width;
123 /* Element N is the list of insns that initialized reg N from its equivalent
124 constant or memory slot. */
125 static rtx *reg_equiv_init;
127 /* Vector to remember old contents of reg_renumber before spilling. */
128 static short *reg_old_renumber;
130 /* During reload_as_needed, element N contains the last pseudo regno reloaded
131 into hard register N. If that pseudo reg occupied more than one register,
132 reg_reloaded_contents points to that pseudo for each spill register in
133 use; all of these must remain set for an inheritance to occur. */
134 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
136 /* During reload_as_needed, element N contains the insn for which
137 hard register N was last used. Its contents are significant only
138 when reg_reloaded_valid is set for this register. */
139 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
141 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
142 static HARD_REG_SET reg_reloaded_valid;
143 /* Indicate if the register was dead at the end of the reload.
144 This is only valid if reg_reloaded_contents is set and valid. */
145 static HARD_REG_SET reg_reloaded_dead;
147 /* Number of spill-regs so far; number of valid elements of spill_regs. */
148 static int n_spills;
150 /* In parallel with spill_regs, contains REG rtx's for those regs.
151 Holds the last rtx used for any given reg, or 0 if it has never
152 been used for spilling yet. This rtx is reused, provided it has
153 the proper mode. */
154 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
156 /* In parallel with spill_regs, contains nonzero for a spill reg
157 that was stored after the last time it was used.
158 The precise value is the insn generated to do the store. */
159 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
161 /* This is the register that was stored with spill_reg_store. This is a
162 copy of reload_out / reload_out_reg when the value was stored; if
163 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
164 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
166 /* This table is the inverse mapping of spill_regs:
167 indexed by hard reg number,
168 it contains the position of that reg in spill_regs,
169 or -1 for something that is not in spill_regs.
171 ?!? This is no longer accurate. */
172 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
174 /* This reg set indicates registers that can't be used as spill registers for
175 the currently processed insn. These are the hard registers which are live
176 during the insn, but not allocated to pseudos, as well as fixed
177 registers. */
178 static HARD_REG_SET bad_spill_regs;
180 /* These are the hard registers that can't be used as spill register for any
181 insn. This includes registers used for user variables and registers that
182 we can't eliminate. A register that appears in this set also can't be used
183 to retry register allocation. */
184 static HARD_REG_SET bad_spill_regs_global;
186 /* Describes order of use of registers for reloading
187 of spilled pseudo-registers. `n_spills' is the number of
188 elements that are actually valid; new ones are added at the end.
190 Both spill_regs and spill_reg_order are used on two occasions:
191 once during find_reload_regs, where they keep track of the spill registers
192 for a single insn, but also during reload_as_needed where they show all
193 the registers ever used by reload. For the latter case, the information
194 is calculated during finish_spills. */
195 static short spill_regs[FIRST_PSEUDO_REGISTER];
197 /* This vector of reg sets indicates, for each pseudo, which hard registers
198 may not be used for retrying global allocation because the register was
199 formerly spilled from one of them. If we allowed reallocating a pseudo to
200 a register that it was already allocated to, reload might not
201 terminate. */
202 static HARD_REG_SET *pseudo_previous_regs;
204 /* This vector of reg sets indicates, for each pseudo, which hard
205 registers may not be used for retrying global allocation because they
206 are used as spill registers during one of the insns in which the
207 pseudo is live. */
208 static HARD_REG_SET *pseudo_forbidden_regs;
210 /* All hard regs that have been used as spill registers for any insn are
211 marked in this set. */
212 static HARD_REG_SET used_spill_regs;
214 /* Index of last register assigned as a spill register. We allocate in
215 a round-robin fashion. */
216 static int last_spill_reg;
218 /* Describes order of preference for putting regs into spill_regs.
219 Contains the numbers of all the hard regs, in order most preferred first.
220 This order is different for each function.
221 It is set up by order_regs_for_reload.
222 Empty elements at the end contain -1. */
223 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
225 /* Nonzero if indirect addressing is supported on the machine; this means
226 that spilling (REG n) does not require reloading it into a register in
227 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
228 value indicates the level of indirect addressing supported, e.g., two
229 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
230 a hard register. */
231 static char spill_indirect_levels;
233 /* Nonzero if indirect addressing is supported when the innermost MEM is
234 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
235 which these are valid is the same as spill_indirect_levels, above. */
236 char indirect_symref_ok;
238 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
239 char double_reg_address_ok;
241 /* Record the stack slot for each spilled hard register. */
242 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
244 /* Width allocated so far for that stack slot. */
245 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
247 /* Record which pseudos needed to be spilled. */
248 static regset spilled_pseudos;
250 /* First uid used by insns created by reload in this function.
251 Used in find_equiv_reg. */
252 int reload_first_uid;
254 /* Flag set by local-alloc or global-alloc if anything is live in
255 a call-clobbered reg across calls. */
256 int caller_save_needed;
258 /* Set to 1 while reload_as_needed is operating.
259 Required by some machines to handle any generated moves differently. */
260 int reload_in_progress = 0;
262 /* These arrays record the insn_code of insns that may be needed to
263 perform input and output reloads of special objects. They provide a
264 place to pass a scratch register. */
265 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
268 /* This obstack is used for allocation of rtl during register elimination.
269 The allocated storage can be freed once find_reloads has processed the
270 insn. */
271 struct obstack reload_obstack;
273 /* Points to the beginning of the reload_obstack. All insn_chain structures
274 are allocated first. */
275 char *reload_startobj;
277 /* The point after all insn_chain structures. Used to quickly deallocate
278 memory used while processing one insn. */
279 char *reload_firstobj;
281 #define obstack_chunk_alloc xmalloc
282 #define obstack_chunk_free free
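/* Editor's note (illustrative): the obstack is used in a mark-and-release
   pattern.  A zero-length allocation records a marker, and freeing back to
   that marker releases everything allocated after it in one call:

     reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
     ... allocate temporary rtl while processing one insn ...
     obstack_free (&reload_obstack, reload_firstobj);

   This is how the per-insn allocations are discarded cheaply below.  */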
284 /* List of insn_chain instructions, one for every insn that reload needs to
285 examine. */
286 struct insn_chain *reload_insn_chain;
288 #ifdef TREE_CODE
289 extern tree current_function_decl;
290 #else
291 extern union tree_node *current_function_decl;
292 #endif
294 /* List of all insns needing reloads. */
295 static struct insn_chain *insns_need_reload;
297 /* This structure is used to record information about register eliminations.
298 Each array entry describes one possible way of eliminating a register
299 in favor of another. If there is more than one way of eliminating a
300 particular register, the most preferred should be specified first. */
302 struct elim_table
304 int from; /* Register number to be eliminated. */
305 int to; /* Register number used as replacement. */
306 int initial_offset; /* Initial difference between values. */
307 int can_eliminate; /* Non-zero if this elimination can be done. */
308 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
309 insns made by reload. */
310 int offset; /* Current offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
321 static struct elim_table * reg_eliminate = 0;
323 /* This is an intermediate structure to initialize the table. It has
324 exactly the members provided by ELIMINABLE_REGS. */
325 static struct elim_table_1
327 int from;
328 int to;
329 } reg_eliminate_1[] =
331 /* If a set of eliminable registers was specified, define the table from it.
332 Otherwise, default to the normal case of the frame pointer being
333 replaced by the stack pointer. */
335 #ifdef ELIMINABLE_REGS
336 ELIMINABLE_REGS;
337 #else
338 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
339 #endif
341 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0])
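/* Editor's note (illustrative): a target that can eliminate both the
   argument pointer and a soft frame pointer might define, for example:

     #define ELIMINABLE_REGS					\
     {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },		\
      { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
      { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },		\
      { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   The most preferred elimination for a given "from" register is listed
   first, as required by the comment above struct elim_table.  */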
343 /* Record the number of pending eliminations that have an offset not equal
344 to their initial offset. If non-zero, we use a new copy of each
345 replacement result in any insns encountered. */
346 int num_not_at_initial_offset;
348 /* Count the number of registers that we may be able to eliminate. */
349 static int num_eliminable;
350 /* And the number of registers that are equivalent to a constant that
351 can be eliminated to frame_pointer / arg_pointer + constant. */
352 static int num_eliminable_invariants;
354 /* For each label, we record the offset of each elimination. If we reach
355 a label by more than one path and an offset differs, we cannot do the
356 elimination. This information is indexed by the number of the label.
357 The first table is an array of flags that records whether we have yet
358 encountered a label and the second table is an array of arrays, one
359 entry in the latter array for each elimination. */
361 static char *offsets_known_at;
362 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
364 /* Number of labels in the current function. */
366 static int num_labels;
368 struct hard_reg_n_uses
370 int regno;
371 unsigned int uses;
374 static void maybe_fix_stack_asms PROTO((void));
375 static void calculate_needs_all_insns PROTO((int));
376 static void calculate_needs PROTO((struct insn_chain *));
377 static void find_reload_regs PROTO((struct insn_chain *chain,
378 FILE *));
379 static void find_tworeg_group PROTO((struct insn_chain *, int,
380 FILE *));
381 static void find_group PROTO((struct insn_chain *, int,
382 FILE *));
383 static int possible_group_p PROTO((struct insn_chain *, int));
384 static void count_possible_groups PROTO((struct insn_chain *, int));
385 static int modes_equiv_for_class_p PROTO((enum machine_mode,
386 enum machine_mode,
387 enum reg_class));
388 static void delete_caller_save_insns PROTO((void));
390 static void spill_failure PROTO((rtx));
391 static void new_spill_reg PROTO((struct insn_chain *, int, int,
392 int, FILE *));
393 static void maybe_mark_pseudo_spilled PROTO((int));
394 static void delete_dead_insn PROTO((rtx));
395 static void alter_reg PROTO((int, int));
396 static void set_label_offsets PROTO((rtx, rtx, int));
397 static void check_eliminable_occurrences PROTO((rtx));
398 static void elimination_effects PROTO((rtx, enum machine_mode));
399 static int eliminate_regs_in_insn PROTO((rtx, int));
400 static void update_eliminable_offsets PROTO((void));
401 static void mark_not_eliminable PROTO((rtx, rtx, void *));
402 static void set_initial_elim_offsets PROTO((void));
403 static void verify_initial_elim_offsets PROTO((void));
404 static void set_initial_label_offsets PROTO((void));
405 static void set_offsets_for_label PROTO((rtx));
406 static void init_elim_table PROTO((void));
407 static void update_eliminables PROTO((HARD_REG_SET *));
408 static void spill_hard_reg PROTO((int, FILE *, int));
409 static int finish_spills PROTO((int, FILE *));
410 static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *));
411 static void scan_paradoxical_subregs PROTO((rtx));
412 static int hard_reg_use_compare PROTO((const PTR, const PTR));
413 static void count_pseudo PROTO((struct hard_reg_n_uses *, int));
414 static void order_regs_for_reload PROTO((struct insn_chain *));
415 static void reload_as_needed PROTO((int));
416 static void forget_old_reloads_1 PROTO((rtx, rtx, void *));
417 static int reload_reg_class_lower PROTO((const PTR, const PTR));
418 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
419 enum machine_mode));
420 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
421 enum machine_mode));
422 static int reload_reg_free_p PROTO((int, int, enum reload_type));
423 static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int, int));
424 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
425 static int allocate_reload_reg PROTO((struct insn_chain *, int, int,
426 int));
427 static void choose_reload_regs_init PROTO((struct insn_chain *, rtx *));
428 static void choose_reload_regs PROTO((struct insn_chain *));
429 static void merge_assigned_reloads PROTO((rtx));
430 static void emit_reload_insns PROTO((struct insn_chain *));
431 static void delete_output_reload PROTO((rtx, int, int));
432 static void delete_address_reloads PROTO((rtx, rtx));
433 static void delete_address_reloads_1 PROTO((rtx, rtx, rtx));
434 static rtx inc_for_reload PROTO((rtx, rtx, rtx, int));
435 static int constraint_accepts_reg_p PROTO((const char *, rtx));
436 static void reload_cse_regs_1 PROTO((rtx));
437 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
438 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
439 static void reload_cse_invalidate_mem PROTO((rtx));
440 static void reload_cse_invalidate_rtx PROTO((rtx, rtx, void *));
441 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
442 static int reload_cse_noop_set_p PROTO((rtx, rtx));
443 static int reload_cse_simplify_set PROTO((rtx, rtx));
444 static int reload_cse_simplify_operands PROTO((rtx));
445 static void reload_cse_check_clobber PROTO((rtx, rtx, void *));
446 static void reload_cse_record_set PROTO((rtx, rtx));
447 static void reload_combine PROTO((void));
448 static void reload_combine_note_use PROTO((rtx *, rtx));
449 static void reload_combine_note_store PROTO((rtx, rtx, void *));
450 static void reload_cse_move2add PROTO((rtx));
451 static void move2add_note_store PROTO((rtx, rtx, void *));
452 #ifdef AUTO_INC_DEC
453 static void add_auto_inc_notes PROTO((rtx, rtx));
454 #endif
455 static rtx gen_mode_int PROTO((enum machine_mode,
456 HOST_WIDE_INT));
457 static void failed_reload PROTO((rtx, int));
458 static int set_reload_reg PROTO((int, int));
459 extern void dump_needs PROTO((struct insn_chain *, FILE *));
461 /* Initialize the reload pass once per compilation. */
463 void
464 init_reload ()
466 register int i;
468 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
469 Set spill_indirect_levels to the number of levels such addressing is
470 permitted, zero if it is not permitted at all. */
472 register rtx tem
473 = gen_rtx_MEM (Pmode,
474 gen_rtx_PLUS (Pmode,
475 gen_rtx_REG (Pmode,
476 LAST_VIRTUAL_REGISTER + 1),
477 GEN_INT (4)));
478 spill_indirect_levels = 0;
480 while (memory_address_p (QImode, tem))
482 spill_indirect_levels++;
483 tem = gen_rtx_MEM (Pmode, tem);
486 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
488 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
489 indirect_symref_ok = memory_address_p (QImode, tem);
491 /* See if reg+reg is a valid (and offsettable) address. */
493 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
495 tem = gen_rtx_PLUS (Pmode,
496 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
497 gen_rtx_REG (Pmode, i));
499 /* This way, we make sure that reg+reg is an offsettable address. */
500 tem = plus_constant (tem, 4);
502 if (memory_address_p (QImode, tem))
504 double_reg_address_ok = 1;
505 break;
509 /* Initialize obstack for our rtl allocation. */
510 gcc_obstack_init (&reload_obstack);
511 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
514 /* List of insn chains that are currently unused. */
515 static struct insn_chain *unused_insn_chains = 0;
517 /* Allocate an empty insn_chain structure. */
518 struct insn_chain *
519 new_insn_chain ()
521 struct insn_chain *c;
523 if (unused_insn_chains == 0)
525 c = (struct insn_chain *)
526 obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
527 c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
528 c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
530 else
532 c = unused_insn_chains;
533 unused_insn_chains = c->next;
535 c->is_caller_save_insn = 0;
536 c->need_operand_change = 0;
537 c->need_reload = 0;
538 c->need_elim = 0;
539 return c;
542 /* Small utility function to set all regs in hard reg set TO which are
543 allocated to pseudos in regset FROM. */
544 void
545 compute_use_by_pseudos (to, from)
546 HARD_REG_SET *to;
547 regset from;
549 int regno;
550 EXECUTE_IF_SET_IN_REG_SET
551 (from, FIRST_PSEUDO_REGISTER, regno,
553 int r = reg_renumber[regno];
554 int nregs;
555 if (r < 0)
557 /* reload_combine uses the information from
558 BASIC_BLOCK->global_live_at_start, which might still
559 contain registers that have not actually been allocated
560 since they have an equivalence. */
561 if (! reload_completed)
562 abort ();
564 else
566 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
567 while (nregs-- > 0)
568 SET_HARD_REG_BIT (*to, r + nregs);
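/* Editor's note (illustrative): a pseudo whose mode is wider than one word
   occupies several consecutive hard regs, so on a typical 32-bit target a
   DImode pseudo renumbered to hard reg 3 would set bits 3 and 4 of TO above
   (HARD_REGNO_NREGS being 2 in that case).  */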
573 /* Global variables used by reload and its subroutines. */
575 /* Set during calculate_needs if an insn needs register elimination. */
576 static int something_needs_elimination;
577 /* Set during calculate_needs if an insn needs an operand changed. */
578 int something_needs_operands_changed;
580 /* Nonzero means we couldn't get enough spill regs. */
581 static int failure;
583 /* Main entry point for the reload pass.
585 FIRST is the first insn of the function being compiled.
587 GLOBAL nonzero means we were called from global_alloc
588 and should attempt to reallocate any pseudoregs that we
589 displace from hard regs we will use for reloads.
590 If GLOBAL is zero, we do not have enough information to do that,
591 so any pseudo reg that is spilled must go to the stack.
593 DUMPFILE is the global-reg debugging dump file stream, or 0.
594 If it is nonzero, messages are written to it to describe
595 which registers are seized as reload regs, which pseudo regs
596 are spilled from them, and where the pseudo regs are reallocated to.
598 Return value is nonzero if reload failed
599 and we must not do any more for this function. */
601 int
602 reload (first, global, dumpfile)
603 rtx first;
604 int global;
605 FILE *dumpfile;
607 register int i;
608 register rtx insn;
609 register struct elim_table *ep;
611 /* The two pointers used to track the true location of the memory used
612 for label offsets. */
613 char *real_known_ptr = NULL_PTR;
614 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
616 /* Make sure even insns with volatile mem refs are recognizable. */
617 init_recog ();
619 failure = 0;
621 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
623 /* Make sure that the last insn in the chain
624 is not something that needs reloading. */
625 emit_note (NULL_PTR, NOTE_INSN_DELETED);
627 /* Enable find_equiv_reg to distinguish insns made by reload. */
628 reload_first_uid = get_max_uid ();
630 #ifdef SECONDARY_MEMORY_NEEDED
631 /* Initialize the secondary memory table. */
632 clear_secondary_mem ();
633 #endif
635 /* We don't have a stack slot for any spill reg yet. */
636 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
637 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
639 /* Initialize the save area information for caller-save, in case some
640 are needed. */
641 init_save_areas ();
643 /* Compute which hard registers are now in use
644 as homes for pseudo registers.
645 This is done here rather than (eg) in global_alloc
646 because this point is reached even if not optimizing. */
647 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
648 mark_home_live (i);
650 /* A function that receives a nonlocal goto must save all call-saved
651 registers. */
652 if (current_function_has_nonlocal_label)
653 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
655 if (! call_used_regs[i] && ! fixed_regs[i])
656 regs_ever_live[i] = 1;
659 /* Find all the pseudo registers that didn't get hard regs
660 but do have known equivalent constants or memory slots.
661 These include parameters (known equivalent to parameter slots)
662 and cse'd or loop-moved constant memory addresses.
664 Record constant equivalents in reg_equiv_constant
665 so they will be substituted by find_reloads.
666 Record memory equivalents in reg_mem_equiv so they can
667 be substituted eventually by altering the REG-rtx's. */
669 reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
670 reg_equiv_memory_loc = (rtx *) xcalloc (max_regno, sizeof (rtx));
671 reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
672 reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
673 reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
674 reg_max_ref_width = (int *) xcalloc (max_regno, sizeof (int));
675 reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
676 bcopy ((PTR) reg_renumber, (PTR) reg_old_renumber, max_regno * sizeof (short));
677 pseudo_forbidden_regs
678 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
679 pseudo_previous_regs
680 = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
682 CLEAR_HARD_REG_SET (bad_spill_regs_global);
684 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
685 Also find all paradoxical subregs and find largest such for each pseudo.
686 On machines with small register classes, record hard registers that
687 are used for user variables. These can never be used for spills.
688 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
689 caller-saved registers must be marked live. */
691 num_eliminable_invariants = 0;
692 for (insn = first; insn; insn = NEXT_INSN (insn))
694 rtx set = single_set (insn);
696 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
697 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
698 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
699 if (! call_used_regs[i])
700 regs_ever_live[i] = 1;
702 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
704 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
705 if (note
706 #ifdef LEGITIMATE_PIC_OPERAND_P
707 && (! function_invariant_p (XEXP (note, 0))
708 || ! flag_pic
709 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
710 #endif
713 rtx x = XEXP (note, 0);
714 i = REGNO (SET_DEST (set));
715 if (i > LAST_VIRTUAL_REGISTER)
717 if (GET_CODE (x) == MEM)
719 /* If the operand is a PLUS, the MEM may be shared,
720 so make sure we have an unshared copy here. */
721 if (GET_CODE (XEXP (x, 0)) == PLUS)
722 x = copy_rtx (x);
724 reg_equiv_memory_loc[i] = x;
726 else if (function_invariant_p (x))
728 if (GET_CODE (x) == PLUS)
730 /* This is PLUS of frame pointer and a constant,
731 and might be shared. Unshare it. */
732 reg_equiv_constant[i] = copy_rtx (x);
733 num_eliminable_invariants++;
735 else if (x == frame_pointer_rtx
736 || x == arg_pointer_rtx)
738 reg_equiv_constant[i] = x;
739 num_eliminable_invariants++;
741 else if (LEGITIMATE_CONSTANT_P (x))
742 reg_equiv_constant[i] = x;
743 else
744 reg_equiv_memory_loc[i]
745 = force_const_mem (GET_MODE (SET_DEST (set)), x);
747 else
748 continue;
750 /* If this register is being made equivalent to a MEM
751 and the MEM is not SET_SRC, the equivalencing insn
752 is one with the MEM as a SET_DEST and it occurs later.
753 So don't mark this insn now. */
754 if (GET_CODE (x) != MEM
755 || rtx_equal_p (SET_SRC (set), x))
756 reg_equiv_init[i]
757 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
762 /* If this insn is setting a MEM from a register equivalent to it,
763 this is the equivalencing insn. */
764 else if (set && GET_CODE (SET_DEST (set)) == MEM
765 && GET_CODE (SET_SRC (set)) == REG
766 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
767 && rtx_equal_p (SET_DEST (set),
768 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
769 reg_equiv_init[REGNO (SET_SRC (set))]
770 = gen_rtx_INSN_LIST (VOIDmode, insn,
771 reg_equiv_init[REGNO (SET_SRC (set))]);
773 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
774 scan_paradoxical_subregs (PATTERN (insn));
777 init_elim_table ();
779 num_labels = max_label_num () - get_first_label_num ();
781 /* Allocate the tables used to store offset information at labels. */
782 /* We used to use alloca here, but the size of what it would try to
783 allocate would occasionally cause it to exceed the stack limit and
784 cause a core dump. */
785 real_known_ptr = xmalloc (num_labels);
786 real_at_ptr
787 = (int (*)[NUM_ELIMINABLE_REGS])
788 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
790 offsets_known_at = real_known_ptr - get_first_label_num ();
791 offsets_at
792 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
794 /* Alter each pseudo-reg rtx to contain its hard reg number.
795 Assign stack slots to the pseudos that lack hard regs or equivalents.
796 Do not touch virtual registers. */
798 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
799 alter_reg (i, -1);
801 /* If we have some registers we think can be eliminated, scan all insns to
802 see if there is an insn that sets one of these registers to something
803 other than itself plus a constant. If so, the register cannot be
804 eliminated. Doing this scan here eliminates an extra pass through the
805 main reload loop in the most common case where register elimination
806 cannot be done. */
807 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
808 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
809 || GET_CODE (insn) == CALL_INSN)
810 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
812 maybe_fix_stack_asms ();
814 insns_need_reload = 0;
815 something_needs_elimination = 0;
817 /* Initialize to -1, which means take the first spill register. */
818 last_spill_reg = -1;
820 spilled_pseudos = ALLOCA_REG_SET ();
822 /* Spill any hard regs that we know we can't eliminate. */
823 CLEAR_HARD_REG_SET (used_spill_regs);
824 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
825 if (! ep->can_eliminate)
826 spill_hard_reg (ep->from, dumpfile, 1);
828 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
829 if (frame_pointer_needed)
830 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
831 #endif
832 finish_spills (global, dumpfile);
834 /* From now on, we may need to generate moves differently. We may also
835 allow modifications of insns which cause them to not be recognized.
836 Any such modifications will be cleaned up during reload itself. */
837 reload_in_progress = 1;
839 /* This loop scans the entire function each go-round
840 and repeats until one repetition spills no additional hard regs. */
841 for (;;)
843 int something_changed;
844 int did_spill;
845 struct insn_chain *chain;
847 HOST_WIDE_INT starting_frame_size;
849 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
850 here because the stack size may be a part of the offset computation
851 for register elimination, and there might have been new stack slots
852 created in the last iteration of this loop. */
853 assign_stack_local (BLKmode, 0, 0);
855 starting_frame_size = get_frame_size ();
857 set_initial_elim_offsets ();
858 set_initial_label_offsets ();
860 /* For each pseudo register that has an equivalent location defined,
861 try to eliminate any eliminable registers (such as the frame pointer)
862 assuming initial offsets for the replacement register, which
863 is the normal case.
865 If the resulting location is directly addressable, substitute
866 the MEM we just got directly for the old REG.
868 If it is not addressable but is a constant or the sum of a hard reg
869 and constant, it is probably not addressable because the constant is
870 out of range, in that case record the address; we will generate
871 hairy code to compute the address in a register each time it is
872 needed. Similarly if it is a hard register, but one that is not
873 valid as an address register.
875 If the location is not addressable, but does not have one of the
876 above forms, assign a stack slot. We have to do this to avoid the
877 potential of producing lots of reloads if, e.g., a location involves
878 a pseudo that didn't get a hard register and has an equivalent memory
879 location that also involves a pseudo that didn't get a hard register.
881 Perhaps at some point we will improve reload_when_needed handling
882 so this problem goes away. But that's very hairy. */
884 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
885 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
887 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
889 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
890 XEXP (x, 0)))
891 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
892 else if (CONSTANT_P (XEXP (x, 0))
893 || (GET_CODE (XEXP (x, 0)) == REG
894 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
895 || (GET_CODE (XEXP (x, 0)) == PLUS
896 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
897 && (REGNO (XEXP (XEXP (x, 0), 0))
898 < FIRST_PSEUDO_REGISTER)
899 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
900 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
901 else
903 /* Make a new stack slot. Then indicate that something
904 changed so we go back and recompute offsets for
905 eliminable registers because the allocation of memory
906 below might change some offset. reg_equiv_{mem,address}
907 will be set up for this pseudo on the next pass around
908 the loop. */
909 reg_equiv_memory_loc[i] = 0;
910 reg_equiv_init[i] = 0;
911 alter_reg (i, -1);
915 if (caller_save_needed)
916 setup_save_areas ();
918 /* If we allocated another stack slot, redo elimination bookkeeping. */
919 if (starting_frame_size != get_frame_size ())
920 continue;
922 if (caller_save_needed)
924 save_call_clobbered_regs ();
925 /* That might have allocated new insn_chain structures. */
926 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
929 calculate_needs_all_insns (global);
931 CLEAR_REG_SET (spilled_pseudos);
932 did_spill = 0;
934 something_changed = 0;
936 /* If we allocated any new memory locations, make another pass
937 since it might have changed elimination offsets. */
938 if (starting_frame_size != get_frame_size ())
939 something_changed = 1;
942 HARD_REG_SET to_spill;
943 CLEAR_HARD_REG_SET (to_spill);
944 update_eliminables (&to_spill);
945 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
946 if (TEST_HARD_REG_BIT (to_spill, i))
948 spill_hard_reg (i, dumpfile, 1);
949 did_spill = 1;
951 /* Regardless of the state of spills, if we previously had
952 a register that we thought we could eliminate, but now cannot
953 eliminate, we must run another pass.
955 Consider pseudos which have an entry in reg_equiv_* which
956 reference an eliminable register. We must make another pass
957 to update reg_equiv_* so that we do not substitute in the
958 old value from when we thought the elimination could be
959 performed. */
960 something_changed = 1;
964 CLEAR_HARD_REG_SET (used_spill_regs);
965 /* Try to satisfy the needs for each insn. */
966 for (chain = insns_need_reload; chain != 0;
967 chain = chain->next_need_reload)
968 find_reload_regs (chain, dumpfile);
970 if (failure)
971 goto failed;
973 if (insns_need_reload != 0 || did_spill)
974 something_changed |= finish_spills (global, dumpfile);
976 if (! something_changed)
977 break;
979 if (caller_save_needed)
980 delete_caller_save_insns ();
983 /* If global-alloc was run, notify it of any register eliminations we have
984 done. */
985 if (global)
986 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
987 if (ep->can_eliminate)
988 mark_elimination (ep->from, ep->to);
990 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
991 If that insn didn't set the register (i.e., it copied the register to
992 memory), just delete that insn instead of the equivalencing insn plus
993 anything now dead. If we call delete_dead_insn on that insn, we may
994 delete the insn that actually sets the register if the register dies
995 there and that is incorrect. */
997 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
999 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1001 rtx list;
1002 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1004 rtx equiv_insn = XEXP (list, 0);
1005 if (GET_CODE (equiv_insn) == NOTE)
1006 continue;
1007 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1008 delete_dead_insn (equiv_insn);
1009 else
1011 PUT_CODE (equiv_insn, NOTE);
1012 NOTE_SOURCE_FILE (equiv_insn) = 0;
1013 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1019 /* Use the reload registers where necessary
1020 by generating move instructions to move the must-be-register
1021 values into or out of the reload registers. */
1023 if (insns_need_reload != 0 || something_needs_elimination
1024 || something_needs_operands_changed)
1026 int old_frame_size = get_frame_size ();
1028 reload_as_needed (global);
1030 if (old_frame_size != get_frame_size ())
1031 abort ();
1033 if (num_eliminable)
1034 verify_initial_elim_offsets ();
1037 /* If we were able to eliminate the frame pointer, show that it is no
1038 longer live at the start of any basic block. If it is live by
1039 virtue of being in a pseudo, that pseudo will be marked live
1040 and hence the frame pointer will be known to be live via that
1041 pseudo. */
1043 if (! frame_pointer_needed)
1044 for (i = 0; i < n_basic_blocks; i++)
1045 CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
1046 HARD_FRAME_POINTER_REGNUM);
1048 /* Come here (with failure set nonzero) if we can't get enough spill regs
1049 and we decide not to abort about it. */
1050 failed:
1052 reload_in_progress = 0;
1054 /* Now eliminate all pseudo regs by modifying them into
1055 their equivalent memory references.
1056 The REG-rtx's for the pseudos are modified in place,
1057 so all insns that used to refer to them now refer to memory.
1059 For a reg that has a reg_equiv_address, all those insns
1060 were changed by reloading so that no insns refer to it any longer;
1061 but the DECL_RTL of a variable decl may refer to it,
1062 and if so this causes the debugging info to mention the variable. */
1064 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1066 rtx addr = 0;
1067 int in_struct = 0;
1068 int is_scalar = 0;
1069 int is_readonly = 0;
1071 if (reg_equiv_memory_loc[i])
1073 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
1074 is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]);
1075 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
1078 if (reg_equiv_mem[i])
1079 addr = XEXP (reg_equiv_mem[i], 0);
1081 if (reg_equiv_address[i])
1082 addr = reg_equiv_address[i];
1084 if (addr)
1086 if (reg_renumber[i] < 0)
1088 rtx reg = regno_reg_rtx[i];
1089 PUT_CODE (reg, MEM);
1090 XEXP (reg, 0) = addr;
1091 REG_USERVAR_P (reg) = 0;
1092 RTX_UNCHANGING_P (reg) = is_readonly;
1093 MEM_IN_STRUCT_P (reg) = in_struct;
1094 MEM_SCALAR_P (reg) = is_scalar;
1095 /* We have no alias information about this newly created
1096 MEM. */
1097 MEM_ALIAS_SET (reg) = 0;
1099 else if (reg_equiv_mem[i])
1100 XEXP (reg_equiv_mem[i], 0) = addr;
1104 /* We must set reload_completed now since the cleanup_subreg_operands call
1105 below will re-recognize each insn and reload may have generated insns
1106 which are only valid during and after reload. */
1107 reload_completed = 1;
1109 /* Make a pass over all the insns and delete all USEs which we
1110 inserted only to tag a REG_EQUAL note on them. Remove all
1111 REG_DEAD and REG_UNUSED notes. Delete all CLOBBER insns and
1112 simplify (subreg (reg)) operands. Also remove all REG_RETVAL and
1113 REG_LIBCALL notes since they are no longer useful or accurate.
1114 Strip and regenerate REG_INC notes that may have been moved
1115 around. */
1117 for (insn = first; insn; insn = NEXT_INSN (insn))
1118 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1120 rtx *pnote;
1122 if ((GET_CODE (PATTERN (insn)) == USE
1123 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1124 || GET_CODE (PATTERN (insn)) == CLOBBER)
1126 PUT_CODE (insn, NOTE);
1127 NOTE_SOURCE_FILE (insn) = 0;
1128 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1129 continue;
1132 pnote = &REG_NOTES (insn);
1133 while (*pnote != 0)
1135 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1136 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1137 || REG_NOTE_KIND (*pnote) == REG_INC
1138 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1139 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1140 *pnote = XEXP (*pnote, 1);
1141 else
1142 pnote = &XEXP (*pnote, 1);
1145 #ifdef AUTO_INC_DEC
1146 add_auto_inc_notes (insn, PATTERN (insn));
1147 #endif
1149 /* And simplify (subreg (reg)) if it appears as an operand. */
1150 cleanup_subreg_operands (insn);
1153 /* If we are doing stack checking, give a warning if this function's
1154 frame size is larger than we expect. */
1155 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1157 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1158 static int verbose_warned = 0;
1160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1161 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1162 size += UNITS_PER_WORD;
1164 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1166 warning ("frame size too large for reliable stack checking");
1167 if (! verbose_warned)
1169 warning ("try reducing the number of local variables");
1170 verbose_warned = 1;
1175 /* Indicate that we no longer have known memory locations or constants. */
1176 if (reg_equiv_constant)
1177 free (reg_equiv_constant);
1178 reg_equiv_constant = 0;
1179 if (reg_equiv_memory_loc)
1180 free (reg_equiv_memory_loc);
1181 reg_equiv_memory_loc = 0;
1183 if (real_known_ptr)
1184 free (real_known_ptr);
1185 if (real_at_ptr)
1186 free (real_at_ptr);
1188 free (reg_equiv_mem);
1189 free (reg_equiv_init);
1190 free (reg_equiv_address);
1191 free (reg_max_ref_width);
1192 free (reg_old_renumber);
1193 free (pseudo_previous_regs);
1194 free (pseudo_forbidden_regs);
1196 FREE_REG_SET (spilled_pseudos);
1198 CLEAR_HARD_REG_SET (used_spill_regs);
1199 for (i = 0; i < n_spills; i++)
1200 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1202 /* Free all the insn_chain structures at once. */
1203 obstack_free (&reload_obstack, reload_startobj);
1204 unused_insn_chains = 0;
1206 return failure;
1209 /* Yet another special case. Unfortunately, reg-stack forces people to
1210 write incorrect clobbers in asm statements. These clobbers must not
1211 cause the register to appear in bad_spill_regs, otherwise we'll call
1212 fatal_insn later. We clear the corresponding regnos in the live
1213 register sets to avoid this.
1214 The whole thing is rather sick, I'm afraid. */
1215 static void
1216 maybe_fix_stack_asms ()
1218 #ifdef STACK_REGS
1219 const char *constraints[MAX_RECOG_OPERANDS];
1220 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1221 struct insn_chain *chain;
1223 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1225 int i, noperands;
1226 HARD_REG_SET clobbered, allowed;
1227 rtx pat;
1229 if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
1230 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1231 continue;
1232 pat = PATTERN (chain->insn);
1233 if (GET_CODE (pat) != PARALLEL)
1234 continue;
1236 CLEAR_HARD_REG_SET (clobbered);
1237 CLEAR_HARD_REG_SET (allowed);
1239 /* First, make a mask of all stack regs that are clobbered. */
1240 for (i = 0; i < XVECLEN (pat, 0); i++)
1242 rtx t = XVECEXP (pat, 0, i);
1243 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1244 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1247 /* Get the operand values and constraints out of the insn. */
1248 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1249 constraints, operand_mode);
1251 /* For every operand, see what registers are allowed. */
1252 for (i = 0; i < noperands; i++)
1254 const char *p = constraints[i];
1255 /* For every alternative, we compute the class of registers allowed
1256 for reloading in CLS, and merge its contents into the reg set
1257 ALLOWED. */
1258 int cls = (int) NO_REGS;
1260 for (;;)
1262 char c = *p++;
1264 if (c == '\0' || c == ',' || c == '#')
1266 /* End of one alternative - mark the regs in the current
1267 class, and reset the class. */
1268 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1269 cls = NO_REGS;
1270 if (c == '#')
1271 do {
1272 c = *p++;
1273 } while (c != '\0' && c != ',');
1274 if (c == '\0')
1275 break;
1276 continue;
1279 switch (c)
1281 case '=': case '+': case '*': case '%': case '?': case '!':
1282 case '0': case '1': case '2': case '3': case '4': case 'm':
1283 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1284 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1285 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1286 case 'P':
1287 #ifdef EXTRA_CONSTRAINT
1288 case 'Q': case 'R': case 'S': case 'T': case 'U':
1289 #endif
1290 break;
1292 case 'p':
1293 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1294 break;
1296 case 'g':
1297 case 'r':
1298 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1299 break;
1301 default:
1302 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1307 /* Those of the registers which are clobbered, but allowed by the
1308 constraints, must be usable as reload registers. So clear them
1309 out of the life information. */
1310 AND_HARD_REG_SET (allowed, clobbered);
1311 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1312 if (TEST_HARD_REG_BIT (allowed, i))
1314 CLEAR_REGNO_REG_SET (chain->live_before, i);
1315 CLEAR_REGNO_REG_SET (chain->live_after, i);
1319 #endif
1323 /* Walk the chain of insns, and determine for each whether it needs reloads
1324 and/or eliminations. Build the corresponding insns_need_reload list, and
1325 set something_needs_elimination as appropriate. */
1326 static void
1327 calculate_needs_all_insns (global)
1328 int global;
1330 struct insn_chain **pprev_reload = &insns_need_reload;
1331 struct insn_chain *chain;
1333 something_needs_elimination = 0;
1335 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1337 rtx insn = chain->insn;
1339 /* Clear out the shortcuts, in case they were set last time through. */
1340 chain->need_elim = 0;
1341 chain->need_reload = 0;
1342 chain->need_operand_change = 0;
1344 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1345 include REG_LABEL), we need to see what effects this has on the
1346 known offsets at labels. */
1348 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1349 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1350 && REG_NOTES (insn) != 0))
1351 set_label_offsets (insn, insn, 0);
1353 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1355 rtx old_body = PATTERN (insn);
1356 int old_code = INSN_CODE (insn);
1357 rtx old_notes = REG_NOTES (insn);
1358 int did_elimination = 0;
1359 int operands_changed = 0;
1360 rtx set = single_set (insn);
1362 /* Skip insns that only set an equivalence. */
1363 if (set && GET_CODE (SET_DEST (set)) == REG
1364 && reg_renumber[REGNO (SET_DEST (set))] < 0
1365 && reg_equiv_constant[REGNO (SET_DEST (set))])
1366 continue;
1368 /* If needed, eliminate any eliminable registers. */
1369 if (num_eliminable || num_eliminable_invariants)
1370 did_elimination = eliminate_regs_in_insn (insn, 0);
1372 /* Analyze the instruction. */
1373 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1374 global, spill_reg_order);
1376 /* If a no-op set needs more than one reload, this is likely
1377 to be something that needs input address reloads. We
1378 can't get rid of this cleanly later, and it is of no use
1379 anyway, so discard it now.
1380 We only do this when expensive_optimizations is enabled,
1381 since this complements reload inheritance / output
1382 reload deletion, and it can make debugging harder. */
1383 if (flag_expensive_optimizations && n_reloads > 1)
1385 rtx set = single_set (insn);
1386 if (set
1387 && SET_SRC (set) == SET_DEST (set)
1388 && GET_CODE (SET_SRC (set)) == REG
1389 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1391 PUT_CODE (insn, NOTE);
1392 NOTE_SOURCE_FILE (insn) = 0;
1393 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1394 continue;
1397 if (num_eliminable)
1398 update_eliminable_offsets ();
1400 /* Remember for later shortcuts which insns had any reloads or
1401 register eliminations. */
1402 chain->need_elim = did_elimination;
1403 chain->need_reload = n_reloads > 0;
1404 chain->need_operand_change = operands_changed;
1406 /* Discard any register replacements done. */
1407 if (did_elimination)
1409 obstack_free (&reload_obstack, reload_firstobj);
1410 PATTERN (insn) = old_body;
1411 INSN_CODE (insn) = old_code;
1412 REG_NOTES (insn) = old_notes;
1413 something_needs_elimination = 1;
1416 something_needs_operands_changed |= operands_changed;
1418 if (n_reloads != 0)
1420 *pprev_reload = chain;
1421 pprev_reload = &chain->next_need_reload;
1423 calculate_needs (chain);
1427 *pprev_reload = 0;
1430 /* Compute the most additional registers needed by one instruction,
1431 given by CHAIN. Collect information separately for each class of regs.
1433 To compute the number of reload registers of each class needed for an
1434 insn, we must simulate what choose_reload_regs can do. We do this by
1435 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1436 reloads are used in both. The input part uses those reloads,
1437 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1438 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1439 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1441 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1442 which are live for the entire output portion, and the maximum of all the
1443 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1445 The total number of registers needed is the maximum of the
1446 inputs and outputs. */
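/* Editor's note, a purely illustrative example of the computation below:
   suppose an insn has one RELOAD_OTHER reload, two RELOAD_FOR_INPUT
   reloads, one RELOAD_FOR_INPUT_ADDRESS reload and one RELOAD_FOR_OUTPUT
   reload, all single registers of class GENERAL_REGS.  The input section
   then needs MAX (2 + 0 + 0, 1 + 2) = 3 registers (the input address
   reload conflicts with the inputs), the output section needs 1, and the
   insn as a whole needs 1 + MAX (3, 1) = 4 GENERAL_REGS.  */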
1448 static void
1449 calculate_needs (chain)
1450 struct insn_chain *chain;
1452 int i;
1454 /* Each `struct needs' corresponds to one RELOAD_... type. */
1455 struct {
1456 struct needs other;
1457 struct needs input;
1458 struct needs output;
1459 struct needs insn;
1460 struct needs other_addr;
1461 struct needs op_addr;
1462 struct needs op_addr_reload;
1463 struct needs in_addr[MAX_RECOG_OPERANDS];
1464 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1465 struct needs out_addr[MAX_RECOG_OPERANDS];
1466 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1467 } insn_needs;
1469 bzero ((char *) chain->group_size, sizeof chain->group_size);
1470 for (i = 0; i < N_REG_CLASSES; i++)
1471 chain->group_mode[i] = VOIDmode;
1472 bzero ((char *) &insn_needs, sizeof insn_needs);
1474 /* Count each reload once in every class
1475 containing the reload's own class. */
1477 for (i = 0; i < n_reloads; i++)
1479 register enum reg_class *p;
1480 enum reg_class class = rld[i].class;
1481 int size;
1482 enum machine_mode mode;
1483 struct needs *this_needs;
1485 /* Don't count the dummy reloads, for which one of the
1486 regs mentioned in the insn can be used for reloading.
1487 Don't count optional reloads.
1488 Don't count reloads that got combined with others. */
1489 if (rld[i].reg_rtx != 0
1490 || rld[i].optional != 0
1491 || (rld[i].out == 0 && rld[i].in == 0
1492 && ! rld[i].secondary_p))
1493 continue;
1495 mode = rld[i].mode;
1496 size = rld[i].nregs;
1498 /* Decide which time-of-use to count this reload for. */
1499 switch (rld[i].when_needed)
1501 case RELOAD_OTHER:
1502 this_needs = &insn_needs.other;
1503 break;
1504 case RELOAD_FOR_INPUT:
1505 this_needs = &insn_needs.input;
1506 break;
1507 case RELOAD_FOR_OUTPUT:
1508 this_needs = &insn_needs.output;
1509 break;
1510 case RELOAD_FOR_INSN:
1511 this_needs = &insn_needs.insn;
1512 break;
1513 case RELOAD_FOR_OTHER_ADDRESS:
1514 this_needs = &insn_needs.other_addr;
1515 break;
1516 case RELOAD_FOR_INPUT_ADDRESS:
1517 this_needs = &insn_needs.in_addr[rld[i].opnum];
1518 break;
1519 case RELOAD_FOR_INPADDR_ADDRESS:
1520 this_needs = &insn_needs.in_addr_addr[rld[i].opnum];
1521 break;
1522 case RELOAD_FOR_OUTPUT_ADDRESS:
1523 this_needs = &insn_needs.out_addr[rld[i].opnum];
1524 break;
1525 case RELOAD_FOR_OUTADDR_ADDRESS:
1526 this_needs = &insn_needs.out_addr_addr[rld[i].opnum];
1527 break;
1528 case RELOAD_FOR_OPERAND_ADDRESS:
1529 this_needs = &insn_needs.op_addr;
1530 break;
1531 case RELOAD_FOR_OPADDR_ADDR:
1532 this_needs = &insn_needs.op_addr_reload;
1533 break;
1534 default:
1535 abort();
1538 if (size > 1)
1540 enum machine_mode other_mode, allocate_mode;
1542 /* Count number of groups needed separately from
1543 number of individual regs needed. */
1544 this_needs->groups[(int) class]++;
1545 p = reg_class_superclasses[(int) class];
1546 while (*p != LIM_REG_CLASSES)
1547 this_needs->groups[(int) *p++]++;
1549 /* Record size and mode of a group of this class. */
1550 /* If more than one size group is needed,
1551 make all groups the largest needed size. */
1552 if (chain->group_size[(int) class] < size)
1554 other_mode = chain->group_mode[(int) class];
1555 allocate_mode = mode;
1557 chain->group_size[(int) class] = size;
1558 chain->group_mode[(int) class] = mode;
1560 else
1562 other_mode = mode;
1563 allocate_mode = chain->group_mode[(int) class];
1566 /* Crash if two dissimilar machine modes both need
1567 groups of consecutive regs of the same class. */
1569 if (other_mode != VOIDmode && other_mode != allocate_mode
1570 && ! modes_equiv_for_class_p (allocate_mode,
1571 other_mode, class))
1572 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1573 chain->insn);
1575 else if (size == 1)
1577 this_needs->regs[(unsigned char)rld[i].nongroup][(int) class] += 1;
1578 p = reg_class_superclasses[(int) class];
1579 while (*p != LIM_REG_CLASSES)
1580 this_needs->regs[(unsigned char)rld[i].nongroup][(int) *p++] += 1;
1582 else
1583 abort ();
1586 /* All reloads have been counted for this insn;
1587 now merge the various times of use.
1588 This sets insn_needs, etc., to the maximum total number
1589 of registers needed at any point in this insn. */
1591 for (i = 0; i < N_REG_CLASSES; i++)
1593 int j, in_max, out_max;
1595 /* Compute normal and nongroup needs. */
1596 for (j = 0; j <= 1; j++)
1598 int k;
1599 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1601 in_max = MAX (in_max,
1602 (insn_needs.in_addr[k].regs[j][i]
1603 + insn_needs.in_addr_addr[k].regs[j][i]));
1604 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1605 out_max = MAX (out_max,
1606 insn_needs.out_addr_addr[k].regs[j][i]);
1609 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1610 and operand addresses but not things used to reload
1611 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1612 don't conflict with things needed to reload inputs or
1613 outputs. */
1615 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1616 insn_needs.op_addr_reload.regs[j][i]),
1617 in_max);
1619 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1621 insn_needs.input.regs[j][i]
1622 = MAX (insn_needs.input.regs[j][i]
1623 + insn_needs.op_addr.regs[j][i]
1624 + insn_needs.insn.regs[j][i],
1625 in_max + insn_needs.input.regs[j][i]);
1627 insn_needs.output.regs[j][i] += out_max;
1628 insn_needs.other.regs[j][i]
1629 += MAX (MAX (insn_needs.input.regs[j][i],
1630 insn_needs.output.regs[j][i]),
1631 insn_needs.other_addr.regs[j][i]);
1635 /* Now compute group needs. */
1636 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1638 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1639 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1640 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1641 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1644 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1645 insn_needs.op_addr_reload.groups[i]),
1646 in_max);
1647 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1649 insn_needs.input.groups[i]
1650 = MAX (insn_needs.input.groups[i]
1651 + insn_needs.op_addr.groups[i]
1652 + insn_needs.insn.groups[i],
1653 in_max + insn_needs.input.groups[i]);
1655 insn_needs.output.groups[i] += out_max;
1656 insn_needs.other.groups[i]
1657 += MAX (MAX (insn_needs.input.groups[i],
1658 insn_needs.output.groups[i]),
1659 insn_needs.other_addr.groups[i]);
1662 /* Record the needs for later. */
1663 chain->need = insn_needs.other;
1666 /* Find a group of exactly 2 registers.
1668 First try to fill out the group by spilling a single register which
1669 would allow completion of the group.
1671 Then try to create a new group from a pair of registers, neither of
1672 which is explicitly used.
1674 Then try to create a group from any pair of registers. */
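/* Illustrative sketch of the first strategy above (register numbers
   are made up): if reg 7 is already a spill reg (spill_reg_order[7]
   >= 0), belongs to CLASS, and is not yet counted for another group,
   then picking reg 6 from potential_reload_regs completes the pair
   {6, 7}; both registers are then marked in counted_for_groups so
   neither can later be reused as half of some other group.  */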
1676 static void
1677 find_tworeg_group (chain, class, dumpfile)
1678 struct insn_chain *chain;
1679 int class;
1680 FILE *dumpfile;
1682 int i;
1683 /* First, look for a register that will complete a group. */
1684 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1686 int j, other;
1688 j = potential_reload_regs[i];
1689 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1690 && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1691 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1692 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1693 && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
1694 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1695 /* We don't want one part of another group.
1696 We could get "two groups" that overlap! */
1697 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
1698 || (j < FIRST_PSEUDO_REGISTER - 1
1699 && (other = j + 1, spill_reg_order[other] >= 0)
1700 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1701 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1702 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1703 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1704 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
1706 register enum reg_class *p;
1708 /* We have found one that will complete a group,
1709 so count off one group as provided. */
1710 chain->need.groups[class]--;
1711 p = reg_class_superclasses[class];
1712 while (*p != LIM_REG_CLASSES)
1714 if (chain->group_size [(int) *p] <= chain->group_size [class])
1715 chain->need.groups[(int) *p]--;
1716 p++;
1719 /* Indicate both these regs are part of a group. */
1720 SET_HARD_REG_BIT (chain->counted_for_groups, j);
1721 SET_HARD_REG_BIT (chain->counted_for_groups, other);
1722 break;
1725 /* We can't complete a group, so start one. */
1726 if (i == FIRST_PSEUDO_REGISTER)
1727 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1729 int j, k;
1730 j = potential_reload_regs[i];
1731 /* Verify that J+1 is a potential reload reg. */
1732 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1733 if (potential_reload_regs[k] == j + 1)
1734 break;
1735 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1736 && k < FIRST_PSEUDO_REGISTER
1737 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1738 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1739 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1740 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1741 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
1742 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1743 break;
1746 /* I should be the index in potential_reload_regs
1747 of the new reload reg we have found. */
1749 new_spill_reg (chain, i, class, 0, dumpfile);
1752 /* Find a group of more than 2 registers.
1753 Look for a sufficient sequence of unspilled registers, and spill them all
1754 at once. */
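/* Illustrative sketch (assuming a group size of 3): scan
   potential_reload_regs for a starting reg J such that J, J+1 and
   J+2 are all still unspilled, none is in bad_spill_regs, all belong
   to CLASS, and HARD_REGNO_MODE_OK accepts J for the group mode;
   then spill all three with new_spill_reg and count one group off
   the need.  */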
1756 static void
1757 find_group (chain, class, dumpfile)
1758 struct insn_chain *chain;
1759 int class;
1760 FILE *dumpfile;
1762 int i;
1764 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1766 int j = potential_reload_regs[i];
1768 if (j >= 0
1769 && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
1770 && HARD_REGNO_MODE_OK (j, chain->group_mode[class]))
1772 int k;
1773 /* Check each reg in the sequence. */
1774 for (k = 0; k < chain->group_size[class]; k++)
1775 if (! (spill_reg_order[j + k] < 0
1776 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1777 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1778 break;
1779 /* We got a full sequence, so spill them all. */
1780 if (k == chain->group_size[class])
1782 register enum reg_class *p;
1783 for (k = 0; k < chain->group_size[class]; k++)
1785 int idx;
1786 SET_HARD_REG_BIT (chain->counted_for_groups, j + k);
1787 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1788 if (potential_reload_regs[idx] == j + k)
1789 break;
1790 new_spill_reg (chain, idx, class, 0, dumpfile);
1793 /* We have found a complete group,
1794 so count off one group as provided. */
1795 chain->need.groups[class]--;
1796 p = reg_class_superclasses[class];
1797 while (*p != LIM_REG_CLASSES)
1799 if (chain->group_size [(int) *p]
1800 <= chain->group_size [class])
1801 chain->need.groups[(int) *p]--;
1802 p++;
1804 return;
1808 /* There are no groups left. */
1809 spill_failure (chain->insn);
1810 failure = 1;
1813 /* If pseudo REG conflicts with one of our reload registers, mark it as
1814 spilled. */
1815 static void
1816 maybe_mark_pseudo_spilled (reg)
1817 int reg;
1819 int i;
1820 int r = reg_renumber[reg];
1821 int nregs;
1823 if (r < 0)
1824 abort ();
1825 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1826 for (i = 0; i < n_spills; i++)
1827 if (r <= spill_regs[i] && r + nregs > spill_regs[i])
1829 SET_REGNO_REG_SET (spilled_pseudos, reg);
1830 return;
1834 /* Find more reload regs to satisfy the remaining need of an insn, which
1835 is given by CHAIN.
1836 Do it by ascending class number, since otherwise a reg
1837 might be spilled for a big class and might fail to count
1838 for a smaller class even though it belongs to that class.
1840 Count spilled regs in `spills', and add entries to
1841 `spill_regs' and `spill_reg_order'.
1843 ??? Note there is a problem here.
1844 When there is a need for a group in a high-numbered class,
1845 and also need for non-group regs that come from a lower class,
1846 the non-group regs are chosen first. If there aren't many regs,
1847 they might leave no room for a group.
1849 This was happening on the 386. To fix it, we added the code
1850 that calls possible_group_p, so that the lower class won't
1851 break up the last possible group.
1853 Really fixing the problem would require changes above
1854 in counting the regs already spilled, and in choose_reload_regs.
1855 It might be hard to avoid introducing bugs there. */
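/* Illustrative scenario (hypothetical, in the spirit of the 386 note
   above): with very few registers, satisfying a single-reg need
   first might spill, say, reg 1 and leave only registers with no
   free neighbour, so a later need for a two-reg group can no longer
   be met.  possible_group_p is the guard that keeps the single-reg
   choice below from taking a register that would break the last
   remaining pair.  */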
1857 static void
1858 find_reload_regs (chain, dumpfile)
1859 struct insn_chain *chain;
1860 FILE *dumpfile;
1862 int i, class;
1863 short *group_needs = chain->need.groups;
1864 short *simple_needs = chain->need.regs[0];
1865 short *nongroup_needs = chain->need.regs[1];
1867 if (dumpfile)
1868 fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1870 /* Compute the order of preference for hard registers to spill.
1871 Store them by decreasing preference in potential_reload_regs. */
1873 order_regs_for_reload (chain);
1875 /* So far, no hard regs have been spilled. */
1876 n_spills = 0;
1877 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1878 spill_reg_order[i] = -1;
1880 CLEAR_HARD_REG_SET (chain->used_spill_regs);
1881 CLEAR_HARD_REG_SET (chain->counted_for_groups);
1882 CLEAR_HARD_REG_SET (chain->counted_for_nongroups);
1884 for (class = 0; class < N_REG_CLASSES; class++)
1886 /* First get the groups of registers.
1887 If we got single registers first, we might fragment
1888 possible groups. */
1889 while (group_needs[class] > 0)
1891 /* If any single spilled regs happen to form groups,
1892 count them now. Maybe we don't really need
1893 to spill another group. */
1894 count_possible_groups (chain, class);
1896 if (group_needs[class] <= 0)
1897 break;
1899 /* Groups of size 2, the only groups used on most machines,
1900 are treated specially. */
1901 if (chain->group_size[class] == 2)
1902 find_tworeg_group (chain, class, dumpfile);
1903 else
1904 find_group (chain, class, dumpfile);
1905 if (failure)
1906 return;
1909 /* Now similarly satisfy all need for single registers. */
1911 while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
1913 /* If we spilled enough regs, but they weren't counted
1914 against the non-group need, see if we can count them now.
1915 If so, we can avoid some actual spilling. */
1916 if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
1917 for (i = 0; i < n_spills; i++)
1919 int regno = spill_regs[i];
1920 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1921 && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
1922 && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
1923 && nongroup_needs[class] > 0)
1925 register enum reg_class *p;
1927 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
1928 nongroup_needs[class]--;
1929 p = reg_class_superclasses[class];
1930 while (*p != LIM_REG_CLASSES)
1931 nongroup_needs[(int) *p++]--;
1935 if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
1936 break;
1938 /* Consider the potential reload regs that aren't
1939 yet in use as reload regs, in order of preference.
1940 Find the most preferred one that's in this class. */
1942 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1944 int regno = potential_reload_regs[i];
1945 if (regno >= 0
1946 && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1947 /* If this reg will not be available for groups,
1948 pick one that does not foreclose possible groups.
1949 This is a kludge, and not very general,
1950 but it should be sufficient to make the 386 work,
1951 and the problem should not occur on machines with
1952 more registers. */
1953 && (nongroup_needs[class] == 0
1954 || possible_group_p (chain, regno)))
1955 break;
1958 /* If we couldn't get a register, try to get one even if we
1959 might foreclose possible groups. This may cause problems
1960 later, but that's better than aborting now, since it is
1961 possible that we will, in fact, be able to form the needed
1962 group even with this allocation. */
1964 if (i >= FIRST_PSEUDO_REGISTER
1965 && asm_noperands (chain->insn) < 0)
1966 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1967 if (potential_reload_regs[i] >= 0
1968 && TEST_HARD_REG_BIT (reg_class_contents[class],
1969 potential_reload_regs[i]))
1970 break;
1972 /* I should be the index in potential_reload_regs
1973 of the new reload reg we have found. */
1975 new_spill_reg (chain, i, class, 1, dumpfile);
1976 if (failure)
1977 return;
1981 /* We know which hard regs to use, now mark the pseudos that live in them
1982 as needing to be kicked out. */
1983 EXECUTE_IF_SET_IN_REG_SET
1984 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
1986 maybe_mark_pseudo_spilled (i);
1988 EXECUTE_IF_SET_IN_REG_SET
1989 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
1991 maybe_mark_pseudo_spilled (i);
1994 IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
1997 void
1998 dump_needs (chain, dumpfile)
1999 struct insn_chain *chain;
2000 FILE *dumpfile;
2002 static const char * const reg_class_names[] = REG_CLASS_NAMES;
2003 int i;
2004 struct needs *n = &chain->need;
2006 for (i = 0; i < N_REG_CLASSES; i++)
2008 if (n->regs[0][i] > 0)
2009 fprintf (dumpfile,
2010 ";; Need %d reg%s of class %s.\n",
2011 n->regs[0][i], n->regs[0][i] == 1 ? "" : "s",
2012 reg_class_names[i]);
2013 if (n->regs[1][i] > 0)
2014 fprintf (dumpfile,
2015 ";; Need %d nongroup reg%s of class %s.\n",
2016 n->regs[1][i], n->regs[1][i] == 1 ? "" : "s",
2017 reg_class_names[i]);
2018 if (n->groups[i] > 0)
2019 fprintf (dumpfile,
2020 ";; Need %d group%s (%smode) of class %s.\n",
2021 n->groups[i], n->groups[i] == 1 ? "" : "s",
2022 GET_MODE_NAME(chain->group_mode[i]),
2023 reg_class_names[i]);
2027 /* Delete all insns that were inserted by emit_caller_save_insns during
2028 this iteration. */
2029 static void
2030 delete_caller_save_insns ()
2032 struct insn_chain *c = reload_insn_chain;
2034 while (c != 0)
2036 while (c != 0 && c->is_caller_save_insn)
2038 struct insn_chain *next = c->next;
2039 rtx insn = c->insn;
2041 if (insn == BLOCK_HEAD (c->block))
2042 BLOCK_HEAD (c->block) = NEXT_INSN (insn);
2043 if (insn == BLOCK_END (c->block))
2044 BLOCK_END (c->block) = PREV_INSN (insn);
2045 if (c == reload_insn_chain)
2046 reload_insn_chain = next;
2048 if (NEXT_INSN (insn) != 0)
2049 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2050 if (PREV_INSN (insn) != 0)
2051 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2053 if (next)
2054 next->prev = c->prev;
2055 if (c->prev)
2056 c->prev->next = next;
2057 c->next = unused_insn_chains;
2058 unused_insn_chains = c;
2059 c = next;
2061 if (c != 0)
2062 c = c->next;
2066 /* Nonzero if, after spilling reg REGNO for non-groups,
2067 it will still be possible to find a group if we still need one. */
2069 static int
2070 possible_group_p (chain, regno)
2071 struct insn_chain *chain;
2072 int regno;
2074 int i;
2075 int class = (int) NO_REGS;
2077 for (i = 0; i < (int) N_REG_CLASSES; i++)
2078 if (chain->need.groups[i] > 0)
2080 class = i;
2081 break;
2084 if (class == (int) NO_REGS)
2085 return 1;
2087 /* Consider each pair of consecutive registers. */
2088 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2090 /* Ignore pairs that include reg REGNO. */
2091 if (i == regno || i + 1 == regno)
2092 continue;
2094 /* Ignore pairs that are outside the class that needs the group.
2095 ??? Here we fail to handle the case where two different classes
2096 independently need groups. But this never happens with our
2097 current machine descriptions. */
2098 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2099 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2100 continue;
2102 /* A pair of consecutive regs we can still spill does the trick. */
2103 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2104 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2105 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2106 return 1;
2108 /* A pair of one already spilled and one we can spill does it
2109 provided the one already spilled is not otherwise reserved. */
2110 if (spill_reg_order[i] < 0
2111 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2112 && spill_reg_order[i + 1] >= 0
2113 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
2114 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
2115 return 1;
2116 if (spill_reg_order[i + 1] < 0
2117 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2118 && spill_reg_order[i] >= 0
2119 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
2120 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
2121 return 1;
2124 return 0;
2127 /* Count any groups of CLASS that can be formed from the registers recently
2128 spilled. */
2130 static void
2131 count_possible_groups (chain, class)
2132 struct insn_chain *chain;
2133 int class;
2135 HARD_REG_SET new;
2136 int i, j;
2138 /* Now find all consecutive groups of spilled registers
2139 and mark each group off against the need for such groups.
2140 But don't count them against ordinary need, yet. */
2142 if (chain->group_size[class] == 0)
2143 return;
2145 CLEAR_HARD_REG_SET (new);
2147 /* Make a mask of all the regs that are spill regs in class CLASS. */
2148 for (i = 0; i < n_spills; i++)
2150 int regno = spill_regs[i];
2152 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2153 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
2154 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
2155 SET_HARD_REG_BIT (new, regno);
2158 /* Find each consecutive group of them. */
2159 for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
2160 if (TEST_HARD_REG_BIT (new, i)
2161 && i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
2162 && HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
2164 for (j = 1; j < chain->group_size[class]; j++)
2165 if (! TEST_HARD_REG_BIT (new, i + j))
2166 break;
2168 if (j == chain->group_size[class])
2170 /* We found a group. Mark it off against this class's need for
2171 groups, and against each superclass too. */
2172 register enum reg_class *p;
2174 chain->need.groups[class]--;
2175 p = reg_class_superclasses[class];
2176 while (*p != LIM_REG_CLASSES)
2178 if (chain->group_size [(int) *p] <= chain->group_size [class])
2179 chain->need.groups[(int) *p]--;
2180 p++;
2183 /* Don't count these registers again. */
2184 for (j = 0; j < chain->group_size[class]; j++)
2185 SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
2188 /* Skip to the last reg in this group. When i is incremented above,
2189 it will then point to the first reg of the next possible group. */
2190 i += j - 1;
2194 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2195 another mode that needs to be reloaded for the same register class CLASS.
2196 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2197 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2199 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2200 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2201 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2202 causes unnecessary failures on machines requiring alignment of register
2203 groups when the two modes are different sizes, because the larger mode has
2204 more strict alignment rules than the smaller mode. */
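/* Illustrative example (hypothetical target): suppose ALLOCATE_MODE is
   DImode, OTHER_MODE is SFmode, and some register in CLASS can hold
   DImode but cannot hold SFmode.  A group allocated for the DImode
   reload might then land on that register and be unusable for the
   SFmode reload, so we return 0 and the caller reports the
   fatal_insn above.  */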
2206 static int
2207 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2208 enum machine_mode allocate_mode, other_mode;
2209 enum reg_class class;
2211 register int regno;
2212 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2214 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2215 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2216 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2217 return 0;
2219 return 1;
2222 /* Handle the failure to find a register to spill.
2223 INSN should be one of the insns which needed this particular spill reg. */
2225 static void
2226 spill_failure (insn)
2227 rtx insn;
2229 if (asm_noperands (PATTERN (insn)) >= 0)
2230 error_for_asm (insn, "`asm' needs too many reloads");
2231 else
2232 fatal_insn ("Unable to find a register to spill.", insn);
2235 /* Add a new register to the tables of available spill-registers.
2236 CHAIN is the insn for which the register will be used; we decrease the
2237 needs of that insn.
2238 I is the index of this register in potential_reload_regs.
2239 CLASS is the regclass whose need is being satisfied.
2240 NONGROUP is 0 if this register is part of a group.
2241 DUMPFILE is the same as the one that `reload' got. */
2243 static void
2244 new_spill_reg (chain, i, class, nongroup, dumpfile)
2245 struct insn_chain *chain;
2246 int i;
2247 int class;
2248 int nongroup;
2249 FILE *dumpfile;
2251 register enum reg_class *p;
2252 int regno = potential_reload_regs[i];
2254 if (i >= FIRST_PSEUDO_REGISTER)
2256 spill_failure (chain->insn);
2257 failure = 1;
2258 return;
2261 if (TEST_HARD_REG_BIT (bad_spill_regs, regno))
2263 static const char * const reg_class_names[] = REG_CLASS_NAMES;
2265 if (asm_noperands (PATTERN (chain->insn)) < 0)
2267 /* The error message is still correct - we know only that it wasn't
2268 an asm statement that caused the problem, but one of the global
2269 registers declared by the user might have screwed us. */
2270 error ("fixed or forbidden register %d (%s) was spilled for class %s.",
2271 regno, reg_names[regno], reg_class_names[class]);
2272 error ("This may be due to a compiler bug or to impossible asm");
2273 error ("statements or clauses.");
2274 fatal_insn ("This is the instruction:", chain->insn);
2276 error_for_asm (chain->insn, "Invalid `asm' statement:");
2277 error_for_asm (chain->insn,
2278 "fixed or forbidden register %d (%s) was spilled for class %s.",
2279 regno, reg_names[regno], reg_class_names[class]);
2280 failure = 1;
2281 return;
2284 /* Make reg REGNO an additional reload reg. */
2286 potential_reload_regs[i] = -1;
2287 spill_regs[n_spills] = regno;
2288 spill_reg_order[regno] = n_spills;
2289 if (dumpfile)
2290 fprintf (dumpfile, "Spilling reg %d.\n", regno);
2291 SET_HARD_REG_BIT (chain->used_spill_regs, regno);
2293 /* Clear off the needs we just satisfied. */
2295 chain->need.regs[0][class]--;
2296 p = reg_class_superclasses[class];
2297 while (*p != LIM_REG_CLASSES)
2298 chain->need.regs[0][(int) *p++]--;
2300 if (nongroup && chain->need.regs[1][class] > 0)
2302 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
2303 chain->need.regs[1][class]--;
2304 p = reg_class_superclasses[class];
2305 while (*p != LIM_REG_CLASSES)
2306 chain->need.regs[1][(int) *p++]--;
2309 n_spills++;
2312 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2313 data that is dead in INSN. */
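/* Illustrative example (register number made up): if INSN uses
   (reg 42), carries a REG_DEAD note for it, and the previous real
   insn is simply (set (reg 42) (mem ...)) with no side effects, that
   load served only INSN and is deleted as well, recursively.  */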
2315 static void
2316 delete_dead_insn (insn)
2317 rtx insn;
2319 rtx prev = prev_real_insn (insn);
2320 rtx prev_dest;
2322 /* If the previous insn sets a register that dies in our insn, delete it
2323 too. */
2324 if (prev && GET_CODE (PATTERN (prev)) == SET
2325 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2326 && reg_mentioned_p (prev_dest, PATTERN (insn))
2327 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2328 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2329 delete_dead_insn (prev);
2331 PUT_CODE (insn, NOTE);
2332 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2333 NOTE_SOURCE_FILE (insn) = 0;
2336 /* Modify the home of pseudo-reg I.
2337 The new home is present in reg_renumber[I].
2339 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2340 or it may be -1, meaning there is none or it is not relevant.
2341 This is used so that all pseudos spilled from a given hard reg
2342 can share one stack slot. */
2344 static void
2345 alter_reg (i, from_reg)
2346 register int i;
2347 int from_reg;
2349 /* When outputting an inline function, this can happen
2350 for a reg that isn't actually used. */
2351 if (regno_reg_rtx[i] == 0)
2352 return;
2354 /* If the reg got changed to a MEM at rtl-generation time,
2355 ignore it. */
2356 if (GET_CODE (regno_reg_rtx[i]) != REG)
2357 return;
2359 /* Modify the reg-rtx to contain the new hard reg
2360 number or else to contain its pseudo reg number. */
2361 REGNO (regno_reg_rtx[i])
2362 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2364 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2365 allocate a stack slot for it. */
2367 if (reg_renumber[i] < 0
2368 && REG_N_REFS (i) > 0
2369 && reg_equiv_constant[i] == 0
2370 && reg_equiv_memory_loc[i] == 0)
2372 register rtx x;
2373 int inherent_size = PSEUDO_REGNO_BYTES (i);
2374 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2375 int adjust = 0;
2377 /* Each pseudo reg has an inherent size which comes from its own mode,
2378 and a total size which provides room for paradoxical subregs
2379 which refer to the pseudo reg in wider modes.
2381 We can use a slot already allocated if it provides both
2382 enough inherent space and enough total space.
2383 Otherwise, we allocate a new slot, making sure that it has no less
2384 inherent space, and no less total space, than the previous slot. */
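/* Illustrative example (assuming 4-byte SImode and 8-byte DImode): an
   SImode pseudo has an inherent size of 4, but if it is also accessed
   through a paradoxical DImode subreg its reg_max_ref_width is 8, so
   total_size is 8 and the slot must provide 8 bytes while still being
   addressable in SImode.  */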
2385 if (from_reg == -1)
2387 /* No known place to spill from => no slot to reuse. */
2388 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2389 inherent_size == total_size ? 0 : -1);
2390 if (BYTES_BIG_ENDIAN)
2391 /* Cancel the big-endian correction done in assign_stack_local.
2392 Get the address of the beginning of the slot.
2393 This is so we can do a big-endian correction unconditionally
2394 below. */
2395 adjust = inherent_size - total_size;
2397 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2399 /* Reuse a stack slot if possible. */
2400 else if (spill_stack_slot[from_reg] != 0
2401 && spill_stack_slot_width[from_reg] >= total_size
2402 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2403 >= inherent_size))
2404 x = spill_stack_slot[from_reg];
2405 /* Allocate a bigger slot. */
2406 else
2408 /* Compute maximum size needed, both for inherent size
2409 and for total size. */
2410 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2411 rtx stack_slot;
2412 if (spill_stack_slot[from_reg])
2414 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2415 > inherent_size)
2416 mode = GET_MODE (spill_stack_slot[from_reg]);
2417 if (spill_stack_slot_width[from_reg] > total_size)
2418 total_size = spill_stack_slot_width[from_reg];
2420 /* Make a slot with that size. */
2421 x = assign_stack_local (mode, total_size,
2422 inherent_size == total_size ? 0 : -1);
2423 stack_slot = x;
2424 if (BYTES_BIG_ENDIAN)
2426 /* Cancel the big-endian correction done in assign_stack_local.
2427 Get the address of the beginning of the slot.
2428 This is so we can do a big-endian correction unconditionally
2429 below. */
2430 adjust = GET_MODE_SIZE (mode) - total_size;
2431 if (adjust)
2432 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2433 * BITS_PER_UNIT,
2434 MODE_INT, 1),
2435 plus_constant (XEXP (x, 0), adjust));
2437 spill_stack_slot[from_reg] = stack_slot;
2438 spill_stack_slot_width[from_reg] = total_size;
2441 /* On a big endian machine, the "address" of the slot
2442 is the address of the low part that fits its inherent mode. */
2443 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2444 adjust += (total_size - inherent_size);
2446 /* If we have any adjustment to make, or if the stack slot is the
2447 wrong mode, make a new stack slot. */
2448 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2450 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
2451 plus_constant (XEXP (x, 0), adjust));
2453 /* If this was shared among registers, must ensure we never
2454 set it readonly since that can cause scheduling
2455 problems.  Note we would only set it in this adjustment
2456 case in any event, since the code above doesn't set it. */
2458 if (from_reg == -1)
2459 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2462 /* Save the stack slot for later. */
2463 reg_equiv_memory_loc[i] = x;
2467 /* Mark the slots in regs_ever_live for the hard regs
2468 used by pseudo-reg number REGNO. */
2470 void
2471 mark_home_live (regno)
2472 int regno;
2474 register int i, lim;
2475 i = reg_renumber[regno];
2476 if (i < 0)
2477 return;
2478 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2479 while (i < lim)
2480 regs_ever_live[i++] = 1;
2483 /* This function handles the tracking of elimination offsets around branches.
2485 X is a piece of RTL being scanned.
2487 INSN is the insn that it came from, if any.
2489 INITIAL_P is non-zero if we are to set the offset to be the initial
2490 offset and zero if we are setting the offset of the label to be the
2491 current offset. */
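/* Illustrative example (offsets made up): if a label can be reached
   from two points where the frame-pointer-to-stack-pointer offset
   differs (say 16 on one path and 24 after an extra push on the
   other), the recorded and current offsets disagree at the label and
   the code below clears can_eliminate for that elimination.  */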
2493 static void
2494 set_label_offsets (x, insn, initial_p)
2495 rtx x;
2496 rtx insn;
2497 int initial_p;
2499 enum rtx_code code = GET_CODE (x);
2500 rtx tem;
2501 unsigned int i;
2502 struct elim_table *p;
2504 switch (code)
2506 case LABEL_REF:
2507 if (LABEL_REF_NONLOCAL_P (x))
2508 return;
2510 x = XEXP (x, 0);
2512 /* ... fall through ... */
2514 case CODE_LABEL:
2515 /* If we know nothing about this label, set the desired offsets. Note
2516 that this sets the offset at a label to be the offset before a label
2517 if we don't know anything about the label. This is not correct for
2518 the label after a BARRIER, but is the best guess we can make. If
2519 we guessed wrong, we will suppress an elimination that might have
2520 been possible had we been able to guess correctly. */
2522 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2524 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2525 offsets_at[CODE_LABEL_NUMBER (x)][i]
2526 = (initial_p ? reg_eliminate[i].initial_offset
2527 : reg_eliminate[i].offset);
2528 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2531 /* Otherwise, if this is the definition of a label and it is
2532 preceded by a BARRIER, set our offsets to the known offset of
2533 that label. */
2535 else if (x == insn
2536 && (tem = prev_nonnote_insn (insn)) != 0
2537 && GET_CODE (tem) == BARRIER)
2538 set_offsets_for_label (insn);
2539 else
2540 /* If neither of the above cases is true, compare each offset
2541 with those previously recorded and suppress any eliminations
2542 where the offsets disagree. */
2544 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2545 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2546 != (initial_p ? reg_eliminate[i].initial_offset
2547 : reg_eliminate[i].offset))
2548 reg_eliminate[i].can_eliminate = 0;
2550 return;
2552 case JUMP_INSN:
2553 set_label_offsets (PATTERN (insn), insn, initial_p);
2555 /* ... fall through ... */
2557 case INSN:
2558 case CALL_INSN:
2559 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2560 and hence must have all eliminations at their initial offsets. */
2561 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2562 if (REG_NOTE_KIND (tem) == REG_LABEL)
2563 set_label_offsets (XEXP (tem, 0), insn, 1);
2564 return;
2566 case ADDR_VEC:
2567 case ADDR_DIFF_VEC:
2568 /* Each of the labels in the address vector must be at their initial
2569 offsets. We want the first field for ADDR_VEC and the second
2570 field for ADDR_DIFF_VEC. */
2572 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2573 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2574 insn, initial_p);
2575 return;
2577 case SET:
2578 /* We only care about setting PC. If the source is not RETURN,
2579 IF_THEN_ELSE, or a label, disable any eliminations not at
2580 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2581 isn't one of those possibilities. For branches to a label,
2582 call ourselves recursively.
2584 Note that this can disable elimination unnecessarily when we have
2585 a non-local goto since it will look like a non-constant jump to
2586 someplace in the current function. This isn't a significant
2587 problem since such jumps will normally be when all elimination
2588 pairs are back to their initial offsets. */
2590 if (SET_DEST (x) != pc_rtx)
2591 return;
2593 switch (GET_CODE (SET_SRC (x)))
2595 case PC:
2596 case RETURN:
2597 return;
2599 case LABEL_REF:
2600 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2601 return;
2603 case IF_THEN_ELSE:
2604 tem = XEXP (SET_SRC (x), 1);
2605 if (GET_CODE (tem) == LABEL_REF)
2606 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2607 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2608 break;
2610 tem = XEXP (SET_SRC (x), 2);
2611 if (GET_CODE (tem) == LABEL_REF)
2612 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2613 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2614 break;
2615 return;
2617 default:
2618 break;
2621 /* If we reach here, all eliminations must be at their initial
2622 offset because we are doing a jump to a variable address. */
2623 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2624 if (p->offset != p->initial_offset)
2625 p->can_eliminate = 0;
2626 break;
2628 default:
2629 break;
2633 /* Scan X and replace any eliminable registers (such as fp) with a
2634 replacement (such as sp), plus an offset.
2636 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2637 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2638 MEM, we are allowed to replace a sum of a register and the constant zero
2639 with the register, which we cannot do outside a MEM. In addition, we need
2640 to record the fact that a register is referenced outside a MEM.
2642 If INSN is an insn, it is the insn containing X. If we replace a REG
2643 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2644 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2645 the REG is being modified.
2647 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2648 That's used when we eliminate in expressions stored in notes.
2649 This means, do not set ref_outside_mem even if the reference
2650 is outside of MEMs.
2652 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2653 replacements done assuming all offsets are at their initial values. If
2654 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2655 encounter, return the actual location so that find_reloads will do
2656 the proper thing. */
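/* Illustrative example (assumed offset of 16): with the frame-pointer
   to stack-pointer elimination active and previous_offset == 16, a
   bare (reg fp) is rewritten by the REG case below as
   (plus (reg sp) (const_int 16)) via plus_constant.  */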
2658 rtx
2659 eliminate_regs (x, mem_mode, insn)
2660 rtx x;
2661 enum machine_mode mem_mode;
2662 rtx insn;
2664 enum rtx_code code = GET_CODE (x);
2665 struct elim_table *ep;
2666 int regno;
2667 rtx new;
2668 int i, j;
2669 const char *fmt;
2670 int copied = 0;
2672 if (! current_function_decl)
2673 return x;
2675 switch (code)
2677 case CONST_INT:
2678 case CONST_DOUBLE:
2679 case CONST:
2680 case SYMBOL_REF:
2681 case CODE_LABEL:
2682 case PC:
2683 case CC0:
2684 case ASM_INPUT:
2685 case ADDR_VEC:
2686 case ADDR_DIFF_VEC:
2687 case RETURN:
2688 return x;
2690 case ADDRESSOF:
2691 /* This is only for the benefit of the debugging backends, which call
2692 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2693 removed after CSE. */
2694 new = eliminate_regs (XEXP (x, 0), 0, insn);
2695 if (GET_CODE (new) == MEM)
2696 return XEXP (new, 0);
2697 return x;
2699 case REG:
2700 regno = REGNO (x);
2702 /* First handle the case where we encounter a bare register that
2703 is eliminable. Replace it with a PLUS. */
2704 if (regno < FIRST_PSEUDO_REGISTER)
2706 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2707 ep++)
2708 if (ep->from_rtx == x && ep->can_eliminate)
2709 return plus_constant (ep->to_rtx, ep->previous_offset);
2712 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2713 && reg_equiv_constant[regno]
2714 && ! CONSTANT_P (reg_equiv_constant[regno]))
2715 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2716 mem_mode, insn);
2717 return x;
2719 /* You might think handling MINUS in a manner similar to PLUS is a
2720 good idea. It is not. It has been tried multiple times and every
2721 time the change has had to be reverted.
2723 Other parts of reload know a PLUS is special (gen_reload for example)
2724 and require special code to handle a reloaded PLUS operand.
2726 Also consider backends where the flags register is clobbered by a
2727 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2728 lea instruction comes to mind). If we try to reload a MINUS, we
2729 may kill the flags register that was holding a useful value.
2731 So please, before trying to handle MINUS, consider reload as a
2732 whole, not just this little section, as well as the backend issues. */
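/* Illustrative example (same assumed fp-to-sp offset of 16): outside a
   MEM, (plus (reg fp) (const_int 8)) keeps its PLUS form and becomes
   (plus (reg sp) (const_int 24)); inside a MEM, the special case
   (plus (reg fp) (const_int -16)) collapses to just (reg sp), since
   the constant is the negative of the offset.  */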
2733 case PLUS:
2734 /* If this is the sum of an eliminable register and a constant, rework
2735 the sum. */
2736 if (GET_CODE (XEXP (x, 0)) == REG
2737 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2738 && CONSTANT_P (XEXP (x, 1)))
2740 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2741 ep++)
2742 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2744 /* The only time we want to replace a PLUS with a REG (this
2745 occurs when the constant operand of the PLUS is the negative
2746 of the offset) is when we are inside a MEM. We won't want
2747 to do so at other times because that would change the
2748 structure of the insn in a way that reload can't handle.
2749 We special-case the commonest situation in
2750 eliminate_regs_in_insn, so just replace a PLUS with a
2751 PLUS here, unless inside a MEM. */
2752 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2753 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2754 return ep->to_rtx;
2755 else
2756 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2757 plus_constant (XEXP (x, 1),
2758 ep->previous_offset));
2761 /* If the register is not eliminable, we are done since the other
2762 operand is a constant. */
2763 return x;
2766 /* If this is part of an address, we want to bring any constant to the
2767 outermost PLUS. We will do this by doing register replacement in
2768 our operands and seeing if a constant shows up in one of them.
2770 Note that there is no risk of modifying the structure of the insn,
2771 since we only get called for its operands, thus we are either
2772 modifying the address inside a MEM, or something like an address
2773 operand of a load-address insn. */
2776 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2777 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2779 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2781 /* If one side is a PLUS and the other side is a pseudo that
2782 didn't get a hard register but has a reg_equiv_constant,
2783 we must replace the constant here since it may no longer
2784 be in the position of any operand. */
2785 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2786 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2787 && reg_renumber[REGNO (new1)] < 0
2788 && reg_equiv_constant != 0
2789 && reg_equiv_constant[REGNO (new1)] != 0)
2790 new1 = reg_equiv_constant[REGNO (new1)];
2791 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2792 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2793 && reg_renumber[REGNO (new0)] < 0
2794 && reg_equiv_constant[REGNO (new0)] != 0)
2795 new0 = reg_equiv_constant[REGNO (new0)];
2797 new = form_sum (new0, new1);
2799 /* As above, if we are not inside a MEM we do not want to
2800 turn a PLUS into something else. We might try to do so here
2801 for an addition of 0 if we aren't optimizing. */
2802 if (! mem_mode && GET_CODE (new) != PLUS)
2803 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2804 else
2805 return new;
2808 return x;
2810 case MULT:
2811 /* If this is the product of an eliminable register and a
2812 constant, apply the distribute law and move the constant out
2813 so that we have (plus (mult ..) ..). This is needed in order
2814 to keep load-address insns valid. This case is pathological.
2815 We ignore the possibility of overflow here. */
2816 if (GET_CODE (XEXP (x, 0)) == REG
2817 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2818 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2819 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2820 ep++)
2821 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2823 if (! mem_mode
2824 /* Refs inside notes don't count for this purpose. */
2825 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2826 || GET_CODE (insn) == INSN_LIST)))
2827 ep->ref_outside_mem = 1;
2829 return
2830 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2831 ep->previous_offset * INTVAL (XEXP (x, 1)));
2834 /* ... fall through ... */
2836 case CALL:
2837 case COMPARE:
2838 /* See comments before PLUS about handling MINUS. */
2839 case MINUS:
2840 case DIV: case UDIV:
2841 case MOD: case UMOD:
2842 case AND: case IOR: case XOR:
2843 case ROTATERT: case ROTATE:
2844 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2845 case NE: case EQ:
2846 case GE: case GT: case GEU: case GTU:
2847 case LE: case LT: case LEU: case LTU:
2849 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2850 rtx new1
2851 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2853 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2854 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2856 return x;
2858 case EXPR_LIST:
2859 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2860 if (XEXP (x, 0))
2862 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2863 if (new != XEXP (x, 0))
2865 /* If this is a REG_DEAD note, it is not valid anymore.
2866 Using the eliminated version could result in creating a
2867 REG_DEAD note for the stack or frame pointer. */
2868 if (GET_MODE (x) == REG_DEAD)
2869 return (XEXP (x, 1)
2870 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2871 : NULL_RTX);
2873 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2877 /* ... fall through ... */
2879 case INSN_LIST:
2880 /* Now do eliminations in the rest of the chain. If this was
2881 an EXPR_LIST, this might result in allocating more memory than is
2882 strictly needed, but it simplifies the code. */
2883 if (XEXP (x, 1))
2885 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2886 if (new != XEXP (x, 1))
2887 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2889 return x;
2891 case PRE_INC:
2892 case POST_INC:
2893 case PRE_DEC:
2894 case POST_DEC:
2895 case STRICT_LOW_PART:
2896 case NEG: case NOT:
2897 case SIGN_EXTEND: case ZERO_EXTEND:
2898 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2899 case FLOAT: case FIX:
2900 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2901 case ABS:
2902 case SQRT:
2903 case FFS:
2904 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2905 if (new != XEXP (x, 0))
2906 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2907 return x;
2909 case SUBREG:
2910 /* Similar to above processing, but preserve SUBREG_WORD.
2911 Convert (subreg (mem)) to (mem) if not paradoxical.
2912 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2913 pseudo didn't get a hard reg, we must replace this with the
2914 eliminated version of the memory location because push_reloads
2915 may do the replacement in certain circumstances. */
2916 if (GET_CODE (SUBREG_REG (x)) == REG
2917 && (GET_MODE_SIZE (GET_MODE (x))
2918 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2919 && reg_equiv_memory_loc != 0
2920 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2922 new = SUBREG_REG (x);
2924 else
2925 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2927 if (new != XEXP (x, 0))
2929 int x_size = GET_MODE_SIZE (GET_MODE (x));
2930 int new_size = GET_MODE_SIZE (GET_MODE (new));
2932 if (GET_CODE (new) == MEM
2933 && ((x_size < new_size
2934 #ifdef WORD_REGISTER_OPERATIONS
2935 /* On these machines, combine can create rtl of the form
2936 (set (subreg:m1 (reg:m2 R) 0) ...)
2937 where m1 < m2, and expects something interesting to
2938 happen to the entire word. Moreover, it will use the
2939 (reg:m2 R) later, expecting all bits to be preserved.
2940 So if the number of words is the same, preserve the
2941 subreg so that push_reloads can see it. */
2942 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
2943 #endif
2945 || (x_size == new_size))
2948 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2949 enum machine_mode mode = GET_MODE (x);
2951 if (BYTES_BIG_ENDIAN)
2952 offset += (MIN (UNITS_PER_WORD,
2953 GET_MODE_SIZE (GET_MODE (new)))
2954 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2956 PUT_MODE (new, mode);
2957 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2958 return new;
2960 else
2961 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
2964 return x;
2966 case MEM:
2967 /* This is only for the benefit of the debugging backends, which call
2968 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2969 removed after CSE. */
2970 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2971 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
2973 /* Our only special processing is to pass the mode of the MEM to our
2974 recursive call and copy the flags. While we are here, handle this
2975 case more efficiently. */
2976 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
2977 if (new != XEXP (x, 0))
2979 new = gen_rtx_MEM (GET_MODE (x), new);
2980 new->volatil = x->volatil;
2981 new->unchanging = x->unchanging;
2982 new->in_struct = x->in_struct;
2983 return new;
2985 else
2986 return x;
2988 case USE:
2989 case CLOBBER:
2990 case ASM_OPERANDS:
2991 case SET:
2992 abort ();
2994 default:
2995 break;
2998 /* Process each of our operands recursively. If any have changed, make a
2999 copy of the rtx. */
3000 fmt = GET_RTX_FORMAT (code);
3001 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3003 if (*fmt == 'e')
3005 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3006 if (new != XEXP (x, i) && ! copied)
3008 rtx new_x = rtx_alloc (code);
3009 bcopy ((char *) x, (char *) new_x,
3010 (sizeof (*new_x) - sizeof (new_x->fld)
3011 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3012 x = new_x;
3013 copied = 1;
3015 XEXP (x, i) = new;
3017 else if (*fmt == 'E')
3019 int copied_vec = 0;
3020 for (j = 0; j < XVECLEN (x, i); j++)
3022 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3023 if (new != XVECEXP (x, i, j) && ! copied_vec)
3025 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3026 XVEC (x, i)->elem);
3027 if (! copied)
3029 rtx new_x = rtx_alloc (code);
3030 bcopy ((char *) x, (char *) new_x,
3031 (sizeof (*new_x) - sizeof (new_x->fld)
3032 + (sizeof (new_x->fld[0])
3033 * GET_RTX_LENGTH (code))));
3034 x = new_x;
3035 copied = 1;
3037 XVEC (x, i) = new_v;
3038 copied_vec = 1;
3040 XVECEXP (x, i, j) = new;
3045 return x;
3048 /* Scan rtx X for modifications of elimination target registers. Update
3049 the table of eliminables to reflect the changed state. MEM_MODE is
3050 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
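/* Illustrative example (assuming a 4-byte SImode push): a push such as
   (mem:SI (pre_dec (reg sp))) makes the PRE_DEC case below add 4 (or
   PUSH_ROUNDING (4)) to ep->offset for every elimination whose
   replacement register is sp, so later sp-relative references are
   adjusted for the push.  */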
3052 static void
3053 elimination_effects (x, mem_mode)
3054 rtx x;
3055 enum machine_mode mem_mode;
3058 enum rtx_code code = GET_CODE (x);
3059 struct elim_table *ep;
3060 int regno;
3061 int i, j;
3062 const char *fmt;
3064 switch (code)
3066 case CONST_INT:
3067 case CONST_DOUBLE:
3068 case CONST:
3069 case SYMBOL_REF:
3070 case CODE_LABEL:
3071 case PC:
3072 case CC0:
3073 case ASM_INPUT:
3074 case ADDR_VEC:
3075 case ADDR_DIFF_VEC:
3076 case RETURN:
3077 return;
3079 case ADDRESSOF:
3080 abort ();
3082 case REG:
3083 regno = REGNO (x);
3085 /* First handle the case where we encounter a bare register that
3086 is eliminable.  Note whether it is referenced outside a MEM. */
3087 if (regno < FIRST_PSEUDO_REGISTER)
3089 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3090 ep++)
3091 if (ep->from_rtx == x && ep->can_eliminate)
3093 if (! mem_mode)
3094 ep->ref_outside_mem = 1;
3095 return;
3099 else if (reg_renumber[regno] < 0 && reg_equiv_constant
3100 && reg_equiv_constant[regno]
3101 && ! CONSTANT_P (reg_equiv_constant[regno]))
3102 elimination_effects (reg_equiv_constant[regno], mem_mode);
3103 return;
3105 case PRE_INC:
3106 case POST_INC:
3107 case PRE_DEC:
3108 case POST_DEC:
3109 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3110 if (ep->to_rtx == XEXP (x, 0))
3112 int size = GET_MODE_SIZE (mem_mode);
3114 /* If more bytes than MEM_MODE are pushed, account for them. */
3115 #ifdef PUSH_ROUNDING
3116 if (ep->to_rtx == stack_pointer_rtx)
3117 size = PUSH_ROUNDING (size);
3118 #endif
3119 if (code == PRE_DEC || code == POST_DEC)
3120 ep->offset += size;
3121 else
3122 ep->offset -= size;
3125 /* Fall through to generic unary operation case. */
3126 case STRICT_LOW_PART:
3127 case NEG: case NOT:
3128 case SIGN_EXTEND: case ZERO_EXTEND:
3129 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3130 case FLOAT: case FIX:
3131 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3132 case ABS:
3133 case SQRT:
3134 case FFS:
3135 elimination_effects (XEXP (x, 0), mem_mode);
3136 return;
3138 case SUBREG:
3139 if (GET_CODE (SUBREG_REG (x)) == REG
3140 && (GET_MODE_SIZE (GET_MODE (x))
3141 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3142 && reg_equiv_memory_loc != 0
3143 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3144 return;
3146 elimination_effects (SUBREG_REG (x), mem_mode);
3147 return;
3149 case USE:
3150 /* If using a register that is the source of an elimination we still
3151 think can be performed, note it cannot be performed since we don't
3152 know how this register is used. */
3153 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3154 if (ep->from_rtx == XEXP (x, 0))
3155 ep->can_eliminate = 0;
3157 elimination_effects (XEXP (x, 0), mem_mode);
3158 return;
3160 case CLOBBER:
3161 /* If clobbering a register that is the replacement register for an
3162 elimination we still think can be performed, note that it cannot
3163 be performed. Otherwise, we need not be concerned about it. */
3164 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3165 if (ep->to_rtx == XEXP (x, 0))
3166 ep->can_eliminate = 0;
3168 elimination_effects (XEXP (x, 0), mem_mode);
3169 return;
3171 case SET:
3172 /* Check for setting a register that we know about. */
3173 if (GET_CODE (SET_DEST (x)) == REG)
3175 /* See if this is setting the replacement register for an
3176 elimination.
3178 If DEST is the hard frame pointer, we do nothing because we
3179 assume that all assignments to the frame pointer are for
3180 non-local gotos and are being done at a time when they are valid
3181 and do not disturb anything else. Some machines want to
3182 eliminate a fake argument pointer (or even a fake frame pointer)
3183 with either the real frame or the stack pointer. Assignments to
3184 the hard frame pointer must not prevent this elimination. */
3186 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3187 ep++)
3188 if (ep->to_rtx == SET_DEST (x)
3189 && SET_DEST (x) != hard_frame_pointer_rtx)
3191 /* If it is being incremented, adjust the offset. Otherwise,
3192 this elimination can't be done. */
3193 rtx src = SET_SRC (x);
3195 if (GET_CODE (src) == PLUS
3196 && XEXP (src, 0) == SET_DEST (x)
3197 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3198 ep->offset -= INTVAL (XEXP (src, 1));
3199 else
3200 ep->can_eliminate = 0;
3204 elimination_effects (SET_DEST (x), 0);
3205 elimination_effects (SET_SRC (x), 0);
3206 return;
3208 case MEM:
3209 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3210 abort ();
3212 /* Our only special processing is to pass the mode of the MEM to our
3213 recursive call. */
3214 elimination_effects (XEXP (x, 0), GET_MODE (x));
3215 return;
3217 default:
3218 break;
3221 fmt = GET_RTX_FORMAT (code);
3222 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3224 if (*fmt == 'e')
3225 elimination_effects (XEXP (x, i), mem_mode);
3226 else if (*fmt == 'E')
3227 for (j = 0; j < XVECLEN (x, i); j++)
3228 elimination_effects (XVECEXP (x, i, j), mem_mode);
3232 /* Descend through rtx X and verify that no references to eliminable registers
3233 remain. If any do remain, mark the involved register as not
3234 eliminable. */
3235 static void
3236 check_eliminable_occurrences (x)
3237 rtx x;
3239 const char *fmt;
3240 int i;
3241 enum rtx_code code;
3243 if (x == 0)
3244 return;
3246 code = GET_CODE (x);
3248 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3250 struct elim_table *ep;
3252 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3253 if (ep->from_rtx == x && ep->can_eliminate)
3254 ep->can_eliminate = 0;
3255 return;
3258 fmt = GET_RTX_FORMAT (code);
3259 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3261 if (*fmt == 'e')
3262 check_eliminable_occurrences (XEXP (x, i));
3263 else if (*fmt == 'E')
3265 int j;
3266 for (j = 0; j < XVECLEN (x, i); j++)
3267 check_eliminable_occurrences (XVECEXP (x, i, j));
3272 /* Scan INSN and eliminate all eliminable registers in it.
3274 If REPLACE is nonzero, do the replacement destructively. Also
3275 delete the insn as dead if it is setting an eliminable register.
3277 If REPLACE is zero, do all our allocations in reload_obstack.
3279 If no eliminations were done and this insn doesn't require any elimination
3280 processing (these are not identical conditions: it might be updating sp,
3281 but not referencing fp; this needs to be seen during reload_as_needed so
3282 that the offset between fp and sp can be taken into consideration), zero
3283 is returned. Otherwise, 1 is returned. */
3285 static int
3286 eliminate_regs_in_insn (insn, replace)
3287 rtx insn;
3288 int replace;
3290 int icode = recog_memoized (insn);
3291 rtx old_body = PATTERN (insn);
3292 int insn_is_asm = asm_noperands (old_body) >= 0;
3293 rtx old_set = single_set (insn);
3294 rtx new_body;
3295 int val = 0;
3296 int i, any_changes;
3297 rtx substed_operand[MAX_RECOG_OPERANDS];
3298 rtx orig_operand[MAX_RECOG_OPERANDS];
3299 struct elim_table *ep;
3301 if (! insn_is_asm && icode < 0)
3303 if (GET_CODE (PATTERN (insn)) == USE
3304 || GET_CODE (PATTERN (insn)) == CLOBBER
3305 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3306 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3307 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
3308 return 0;
3309 abort ();
3312 if (! replace)
3313 push_obstacks (&reload_obstack, &reload_obstack);
3315 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3316 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3318 /* Check for setting an eliminable register. */
3319 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3320 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3322 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3323 /* If this is setting the frame pointer register to the
3324 hardware frame pointer register and this is an elimination
3325 that will be done (tested above), this insn is really
3326 adjusting the frame pointer downward to compensate for
3327 the adjustment done before a nonlocal goto. */
3328 if (ep->from == FRAME_POINTER_REGNUM
3329 && ep->to == HARD_FRAME_POINTER_REGNUM)
3331 rtx src = SET_SRC (old_set);
3332 int offset = 0, ok = 0;
3333 rtx prev_insn, prev_set;
3335 if (src == ep->to_rtx)
3336 offset = 0, ok = 1;
3337 else if (GET_CODE (src) == PLUS
3338 && GET_CODE (XEXP (src, 0)) == CONST_INT
3339 && XEXP (src, 1) == ep->to_rtx)
3340 offset = INTVAL (XEXP (src, 0)), ok = 1;
3341 else if (GET_CODE (src) == PLUS
3342 && GET_CODE (XEXP (src, 1)) == CONST_INT
3343 && XEXP (src, 0) == ep->to_rtx)
3344 offset = INTVAL (XEXP (src, 1)), ok = 1;
3345 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3346 && (prev_set = single_set (prev_insn)) != 0
3347 && rtx_equal_p (SET_DEST (prev_set), src))
3349 src = SET_SRC (prev_set);
3350 if (src == ep->to_rtx)
3351 offset = 0, ok = 1;
3352 else if (GET_CODE (src) == PLUS
3353 && GET_CODE (XEXP (src, 0)) == CONST_INT
3354 && XEXP (src, 1) == ep->to_rtx)
3355 offset = INTVAL (XEXP (src, 0)), ok = 1;
3356 else if (GET_CODE (src) == PLUS
3357 && GET_CODE (XEXP (src, 1)) == CONST_INT
3358 && XEXP (src, 0) == ep->to_rtx)
3359 offset = INTVAL (XEXP (src, 1)), ok = 1;
3362 if (ok)
3364 if (replace)
3366 rtx src
3367 = plus_constant (ep->to_rtx, offset - ep->offset);
3369 /* First see if this insn remains valid when we
3370 make the change. If not, keep the INSN_CODE
3371 the same and let reload fit it up. */
3372 validate_change (insn, &SET_SRC (old_set), src, 1);
3373 validate_change (insn, &SET_DEST (old_set),
3374 ep->to_rtx, 1);
3375 if (! apply_change_group ())
3377 SET_SRC (old_set) = src;
3378 SET_DEST (old_set) = ep->to_rtx;
3382 val = 1;
3383 goto done;
3386 #endif
3388 /* In this case this insn isn't serving a useful purpose. We
3389 will delete it in reload_as_needed once we know that this
3390 elimination is, in fact, being done.
3392 If REPLACE isn't set, we can't delete this insn, but needn't
3393 process it since it won't be used unless something changes. */
3394 if (replace)
3396 delete_dead_insn (insn);
3397 return 1;
3399 val = 1;
3400 goto done;
3403 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3404 in the insn is the negative of the offset in FROM. Substitute
3405 (set (reg) (reg to)) for the insn and change its code.
3407 We have to do this here, rather than in eliminate_regs, so that we can
3408 change the insn code. */
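/* Illustrative example (assumed fp-to-sp offset of 16, register number
   made up): an insn of the form
   (set (reg 3) (plus (reg fp) (const_int -16))) matches the test
   below, so its pattern is replaced by (set (reg 3) (reg sp)) and
   INSN_CODE is reset to -1 so the simpler move can be re-recognized.  */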
3410 if (GET_CODE (SET_SRC (old_set)) == PLUS
3411 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3412 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3413 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3414 ep++)
3415 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3416 && ep->can_eliminate)
3418 /* We must stop at the first elimination that will be used.
3419 If this one would replace the PLUS with a REG, do it
3420 now. Otherwise, quit the loop and let eliminate_regs
3421 do its normal replacement. */
3422 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3424 /* We assume here that we don't need a PARALLEL of
3425 any CLOBBERs for this assignment. There's not
3426 much we can do if we do need it. */
3427 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3428 SET_DEST (old_set),
3429 ep->to_rtx);
3430 INSN_CODE (insn) = -1;
3431 val = 1;
3432 goto done;
3435 break;
3439 /* Determine the effects of this insn on elimination offsets. */
3440 elimination_effects (old_body, 0);
3442 /* Eliminate all eliminable registers occurring in operands that
3443 can be handled by reload. */
3444 extract_insn (insn);
3445 any_changes = 0;
3446 for (i = 0; i < recog_data.n_operands; i++)
3448 orig_operand[i] = recog_data.operand[i];
3449 substed_operand[i] = recog_data.operand[i];
3451 /* For an asm statement, every operand is eliminable. */
3452 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3454 /* Check for setting a register that we know about. */
3455 if (recog_data.operand_type[i] != OP_IN
3456 && GET_CODE (orig_operand[i]) == REG)
3458 /* If we are assigning to a register that can be eliminated, it
3459 must be as part of a PARALLEL, since the code above handles
3460 single SETs. We must indicate that we can no longer
3461 eliminate this reg. */
3462 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3463 ep++)
3464 if (ep->from_rtx == orig_operand[i] && ep->can_eliminate)
3465 ep->can_eliminate = 0;
3468 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3469 replace ? insn : NULL_RTX);
3470 if (substed_operand[i] != orig_operand[i])
3471 val = any_changes = 1;
3472 /* Terminate the search in check_eliminable_occurrences at
3473 this point. */
3474 *recog_data.operand_loc[i] = 0;
3476 /* If an output operand changed from a REG to a MEM and we are making
3477 the change permanent (REPLACE is set), emit a CLOBBER insn after INSN. */
3478 if (recog_data.operand_type[i] != OP_IN
3479 && GET_CODE (orig_operand[i]) == REG
3480 && GET_CODE (substed_operand[i]) == MEM
3481 && replace)
3482 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3483 insn);
3487 for (i = 0; i < recog_data.n_dups; i++)
3488 *recog_data.dup_loc[i]
3489 = *recog_data.operand_loc[(int)recog_data.dup_num[i]];
3491 /* If any eliminable registers remain, they aren't eliminable anymore. */
3492 check_eliminable_occurrences (old_body);
3494 /* Substitute the operands; the new values are in the substed_operand
3495 array. */
3496 for (i = 0; i < recog_data.n_operands; i++)
3497 *recog_data.operand_loc[i] = substed_operand[i];
3498 for (i = 0; i < recog_data.n_dups; i++)
3499 *recog_data.dup_loc[i] = substed_operand[(int)recog_data.dup_num[i]];
3501 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3502 re-recognize the insn. We do this in case we had a simple addition
3503 but now can do this as a load-address. This saves an insn in this
3504 common case.
3505 If re-recognition fails, the old insn code number will still be used,
3506 and some register operands may have changed into PLUS expressions.
3507 These will be handled by find_reloads by loading them into a register
3508 again. */
3510 if (val)
3512 /* If we aren't replacing things permanently and we changed something,
3513 make another copy to ensure that all the RTL is new. Otherwise
3514 things can go wrong if find_reloads swaps commutative operands
3515 and one is inside RTL that has been copied while the other is not. */
3516 new_body = old_body;
3517 if (! replace)
3518 new_body = copy_insn (old_body);
3519 PATTERN (insn) = new_body;
3521 /* If we had a move insn but now we don't, rerecognize it. This will
3522 cause spurious re-recognition if the old move had a PARALLEL since
3523 the new one still will, but we can't call single_set without
3524 having put NEW_BODY into the insn and the re-recognition won't
3525 hurt in this rare case. */
3526 /* ??? Why this huge if statement - why don't we just rerecognize the
3527 thing always? */
3528 if (! insn_is_asm
3529 && old_set != 0
3530 && ((GET_CODE (SET_SRC (old_set)) == REG
3531 && (GET_CODE (new_body) != SET
3532 || GET_CODE (SET_SRC (new_body)) != REG))
3533 /* If this was a load from or store to memory, compare
3534 the MEM in recog_data.operand to the one in the insn.
3535 If they are not equal, then rerecognize the insn. */
3536 || (old_set != 0
3537 && ((GET_CODE (SET_SRC (old_set)) == MEM
3538 && SET_SRC (old_set) != recog_data.operand[1])
3539 || (GET_CODE (SET_DEST (old_set)) == MEM
3540 && SET_DEST (old_set) != recog_data.operand[0])))
3541 /* If this was an add insn before, rerecognize. */
3542 || GET_CODE (SET_SRC (old_set)) == PLUS))
3544 int new_icode = recog (PATTERN (insn), insn, 0);
3545 if (new_icode < 0)
3546 INSN_CODE (insn) = icode;
3550 /* Restore the old body. If there were any changes to it, we made a copy
3551 of it while the changes were still in place, so we'll correctly return
3552 a modified insn below. */
3553 if (! replace)
3555 /* Restore the old body. */
3556 for (i = 0; i < recog_data.n_operands; i++)
3557 *recog_data.operand_loc[i] = orig_operand[i];
3558 for (i = 0; i < recog_data.n_dups; i++)
3559 *recog_data.dup_loc[i] = orig_operand[(int)recog_data.dup_num[i]];
3562 /* Update all elimination pairs to reflect the status after the current
3563 insn. The changes we make were determined by the earlier call to
3564 elimination_effects.
3566 We also detect cases where register elimination cannot be done,
3567 namely, if a register would be both changed and referenced outside a MEM
3568 in the resulting insn since such an insn is often undefined and, even if
3569 not, we cannot know what meaning will be given to it. Note that it is
3570 valid to have a register used in an address in an insn that changes it
3571 (presumably with a pre- or post-increment or decrement).
3573 If anything changes, return nonzero. */
3575 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3577 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3578 ep->can_eliminate = 0;
3580 ep->ref_outside_mem = 0;
3582 if (ep->previous_offset != ep->offset)
3583 val = 1;
3586 done:
3587 /* If we changed something, perform elimination in REG_NOTES. This is
3588 needed even when REPLACE is zero because a REG_DEAD note might refer
3589 to a register that we eliminate and could cause a different number
3590 of spill registers to be needed in the final reload pass than in
3591 the pre-passes. */
3592 if (val && REG_NOTES (insn) != 0)
3593 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3595 if (! replace)
3596 pop_obstacks ();
3598 return val;
3601 /* Loop through all elimination pairs.
3602 Recalculate the number not at initial offset.
3604 Compute the maximum offset (minimum offset if the stack does not
3605 grow downward) for each elimination pair. */
3607 static void
3608 update_eliminable_offsets ()
3610 struct elim_table *ep;
3612 num_not_at_initial_offset = 0;
3613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3615 ep->previous_offset = ep->offset;
3616 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3617 num_not_at_initial_offset++;
3621 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3622 replacement we currently believe is valid, mark it as not eliminable if X
3623 modifies DEST in any way other than by adding a constant integer to it.
3625 If DEST is the hard frame pointer, we do nothing because we assume that
3626 all assignments to the hard frame pointer are nonlocal gotos and are being
3627 done at a time when they are valid and do not disturb anything else.
3628 Some machines want to eliminate a fake argument pointer with either the
3629 frame or stack pointer. Assignments to the hard frame pointer must not
3630 prevent this elimination.
3632 Called via note_stores from reload before starting its passes to scan
3633 the insns of the function. */
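/* For instance (register names illustrative only): a self-adjustment such as
(set (reg sp) (plus (reg sp) (const_int -8)))
leaves eliminations targeting sp intact, while an arbitrary store such as
(set (reg sp) (reg 3)) forces any elimination whose to-register is sp
to be abandoned, since its offset could no longer be tracked. */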
3635 static void
3636 mark_not_eliminable (dest, x, data)
3637 rtx dest;
3638 rtx x;
3639 void *data ATTRIBUTE_UNUSED;
3641 register unsigned int i;
3643 /* A SUBREG of a hard register here is just changing its mode. We should
3644 not see a SUBREG of an eliminable hard register, but check just in
3645 case. */
3646 if (GET_CODE (dest) == SUBREG)
3647 dest = SUBREG_REG (dest);
3649 if (dest == hard_frame_pointer_rtx)
3650 return;
3652 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3653 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3654 && (GET_CODE (x) != SET
3655 || GET_CODE (SET_SRC (x)) != PLUS
3656 || XEXP (SET_SRC (x), 0) != dest
3657 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3659 reg_eliminate[i].can_eliminate_previous
3660 = reg_eliminate[i].can_eliminate = 0;
3661 num_eliminable--;
3665 /* Verify that the initial elimination offsets did not change since the
3666 last call to set_initial_elim_offsets. This is used to catch cases
3667 where something illegal happened during reload_as_needed that could
3668 cause incorrect code to be generated if we did not check for it. */
3669 static void
3670 verify_initial_elim_offsets ()
3672 int t;
3674 #ifdef ELIMINABLE_REGS
3675 struct elim_table *ep;
3677 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3679 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3680 if (t != ep->initial_offset)
3681 abort ();
3683 #else
3684 INITIAL_FRAME_POINTER_OFFSET (t);
3685 if (t != reg_eliminate[0].initial_offset)
3686 abort ();
3687 #endif
3690 /* Reset all offsets on eliminable registers to their initial values. */
3691 static void
3692 set_initial_elim_offsets ()
3694 struct elim_table *ep = reg_eliminate;
3696 #ifdef ELIMINABLE_REGS
3697 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3699 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3700 ep->previous_offset = ep->offset = ep->initial_offset;
3702 #else
3703 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3704 ep->previous_offset = ep->offset = ep->initial_offset;
3705 #endif
3707 num_not_at_initial_offset = 0;
3710 /* Initialize the known label offsets.
3711 Set a known offset for each forced label to be at the initial offset
3712 of each elimination. We do this because we assume that all
3713 computed jumps occur from a location where each elimination is
3714 at its initial offset.
3715 For all other labels, show that we don't know the offsets. */
3717 static void
3718 set_initial_label_offsets ()
3720 rtx x;
3721 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
3723 for (x = forced_labels; x; x = XEXP (x, 1))
3724 if (XEXP (x, 0))
3725 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3728 /* Set all elimination offsets to the known values for the code label given
3729 by INSN. */
3730 static void
3731 set_offsets_for_label (insn)
3732 rtx insn;
3734 unsigned int i;
3735 int label_nr = CODE_LABEL_NUMBER (insn);
3736 struct elim_table *ep;
3738 num_not_at_initial_offset = 0;
3739 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3741 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3742 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3743 num_not_at_initial_offset++;
3747 /* See if anything that happened changes which eliminations are valid.
3748 For example, on the Sparc, whether or not the frame pointer can
3749 be eliminated can depend on what registers have been used. We need
3750 not check some conditions again (such as flag_omit_frame_pointer)
3751 since they can't have changed. */
3753 static void
3754 update_eliminables (pset)
3755 HARD_REG_SET *pset;
3757 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3758 int previous_frame_pointer_needed = frame_pointer_needed;
3759 #endif
3760 struct elim_table *ep;
3762 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3763 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3764 #ifdef ELIMINABLE_REGS
3765 || ! CAN_ELIMINATE (ep->from, ep->to)
3766 #endif
3768 ep->can_eliminate = 0;
3770 /* Look for the case where we have discovered that we can't replace
3771 register A with register B and that means that we will now be
3772 trying to replace register A with register C. This means we can
3773 no longer replace register C with register B and we need to disable
3774 such an elimination, if it exists. This occurs often with A == ap,
3775 B == sp, and C == fp. */
3777 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3779 struct elim_table *op;
3780 register int new_to = -1;
3782 if (! ep->can_eliminate && ep->can_eliminate_previous)
3784 /* Find the current elimination for ep->from, if there is a
3785 new one. */
3786 for (op = reg_eliminate;
3787 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3788 if (op->from == ep->from && op->can_eliminate)
3790 new_to = op->to;
3791 break;
3794 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3795 disable it. */
3796 for (op = reg_eliminate;
3797 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3798 if (op->from == new_to && op->to == ep->to)
3799 op->can_eliminate = 0;
3803 /* See if any registers that we thought we could eliminate the previous
3804 time are no longer eliminable. If so, something has changed and we
3805 must spill the register. Also, recompute the number of eliminable
3806 registers and see if the frame pointer is needed; it is if there is
3807 no elimination of the frame pointer that we can perform. */
3809 frame_pointer_needed = 1;
3810 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3812 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3813 && ep->to != HARD_FRAME_POINTER_REGNUM)
3814 frame_pointer_needed = 0;
3816 if (! ep->can_eliminate && ep->can_eliminate_previous)
3818 ep->can_eliminate_previous = 0;
3819 SET_HARD_REG_BIT (*pset, ep->from);
3820 num_eliminable--;
3824 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3825 /* If we didn't need a frame pointer last time, but we do now, spill
3826 the hard frame pointer. */
3827 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3828 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3829 #endif
3832 /* Initialize the table of registers to eliminate. */
3833 static void
3834 init_elim_table ()
3836 struct elim_table *ep;
3837 #ifdef ELIMINABLE_REGS
3838 struct elim_table_1 *ep1;
3839 #endif
3841 if (!reg_eliminate)
3842 reg_eliminate = (struct elim_table *)
3843 xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3845 /* Does this function require a frame pointer? */
3847 frame_pointer_needed = (! flag_omit_frame_pointer
3848 #ifdef EXIT_IGNORE_STACK
3849 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3850 and restore sp for alloca. So we can't eliminate
3851 the frame pointer in that case. At some point,
3852 we should improve this by emitting the
3853 sp-adjusting insns for this case. */
3854 || (current_function_calls_alloca
3855 && EXIT_IGNORE_STACK)
3856 #endif
3857 || FRAME_POINTER_REQUIRED);
3859 num_eliminable = 0;
3861 #ifdef ELIMINABLE_REGS
3862 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3863 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3865 ep->from = ep1->from;
3866 ep->to = ep1->to;
3867 ep->can_eliminate = ep->can_eliminate_previous
3868 = (CAN_ELIMINATE (ep->from, ep->to)
3869 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3871 #else
3872 reg_eliminate[0].from = reg_eliminate_1[0].from;
3873 reg_eliminate[0].to = reg_eliminate_1[0].to;
3874 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3875 = ! frame_pointer_needed;
3876 #endif
3878 /* Count the number of eliminable registers and build the FROM and TO
3879 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3880 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3881 We depend on this. */
3882 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3884 num_eliminable += ep->can_eliminate;
3885 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3886 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3890 /* Kick all pseudos out of hard register REGNO.
3891 If DUMPFILE is nonzero, log actions taken to that file.
3893 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3894 because we found we can't eliminate some register. In that case, no pseudos
3895 are allowed to be in the register, even if they are only in a block that
3896 doesn't require spill registers, unlike the case when we are spilling this
3897 hard reg to produce another spill register.
3899 Any pseudos kicked out of the register are recorded in spilled_pseudos.
3901 static void
3902 spill_hard_reg (regno, dumpfile, cant_eliminate)
3903 register int regno;
3904 FILE *dumpfile ATTRIBUTE_UNUSED;
3905 int cant_eliminate;
3907 register int i;
3909 if (cant_eliminate)
3911 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3912 regs_ever_live[regno] = 1;
3915 /* Spill every pseudo reg that was allocated to this reg
3916 or to something that overlaps this reg. */
3918 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3919 if (reg_renumber[i] >= 0
3920 && reg_renumber[i] <= regno
3921 && (reg_renumber[i]
3922 + HARD_REGNO_NREGS (reg_renumber[i],
3923 PSEUDO_REGNO_MODE (i))
3924 > regno))
3925 SET_REGNO_REG_SET (spilled_pseudos, i);
3928 /* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3929 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3930 static void
3931 ior_hard_reg_set (set1, set2)
3932 HARD_REG_SET *set1, *set2;
3934 IOR_HARD_REG_SET (*set1, *set2);
3937 /* After find_reload_regs has been run for all insns that need reloads,
3938 and/or spill_hard_reg was called, this function is used to actually
3939 spill pseudo registers and try to reallocate them. It also sets up the
3940 spill_regs array for use by choose_reload_regs. */
3942 static int
3943 finish_spills (global, dumpfile)
3944 int global;
3945 FILE *dumpfile;
3947 struct insn_chain *chain;
3948 int something_changed = 0;
3949 int i;
3951 /* Build the spill_regs array for the function. */
3952 /* If there are some registers still to eliminate and one of the spill regs
3953 wasn't ever used before, additional stack space may have to be
3954 allocated to store this register. Thus, we may have changed the offset
3955 between the stack and frame pointers, so mark that something has changed.
3957 One might think that we need only mark a change if this is a call-used
3958 register. However, the set of registers that must be saved by the
3959 prologue is not identical to the call-used set. For example, the
3960 register used by the call insn for the return PC is a call-used register,
3961 but must be saved by the prologue. */
3963 n_spills = 0;
3964 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3965 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3967 spill_reg_order[i] = n_spills;
3968 spill_regs[n_spills++] = i;
3969 if (num_eliminable && ! regs_ever_live[i])
3970 something_changed = 1;
3971 regs_ever_live[i] = 1;
3973 else
3974 spill_reg_order[i] = -1;
3976 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3977 if (REGNO_REG_SET_P (spilled_pseudos, i))
3979 /* Record the current hard register the pseudo is allocated to in
3980 pseudo_previous_regs so we avoid reallocating it to the same
3981 hard reg in a later pass. */
3982 if (reg_renumber[i] < 0)
3983 abort ();
3984 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3985 /* Mark it as no longer having a hard register home. */
3986 reg_renumber[i] = -1;
3987 /* We will need to scan everything again. */
3988 something_changed = 1;
3991 /* Retry global register allocation if possible. */
3992 if (global)
3994 bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
3995 /* For every insn that needs reloads, set the registers used as spill
3996 regs in pseudo_forbidden_regs for every pseudo live across the
3997 insn. */
3998 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4000 EXECUTE_IF_SET_IN_REG_SET
4001 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
4003 ior_hard_reg_set (pseudo_forbidden_regs + i,
4004 &chain->used_spill_regs);
4006 EXECUTE_IF_SET_IN_REG_SET
4007 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
4009 ior_hard_reg_set (pseudo_forbidden_regs + i,
4010 &chain->used_spill_regs);
4014 /* Retry allocating the spilled pseudos. For each reg, merge the
4015 various reg sets that indicate which hard regs can't be used,
4016 and call retry_global_alloc.
4017 We change spilled_pseudos here to only contain pseudos that did not
4018 get a new hard register. */
4019 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4020 if (reg_old_renumber[i] != reg_renumber[i])
4022 HARD_REG_SET forbidden;
4023 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
4024 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
4025 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
4026 retry_global_alloc (i, forbidden);
4027 if (reg_renumber[i] >= 0)
4028 CLEAR_REGNO_REG_SET (spilled_pseudos, i);
4032 /* Fix up the register information in the insn chain.
4033 This involves deleting those of the spilled pseudos which did not get
4034 a new hard register home from the live_{before,after} sets. */
4035 for (chain = reload_insn_chain; chain; chain = chain->next)
4037 HARD_REG_SET used_by_pseudos;
4038 HARD_REG_SET used_by_pseudos2;
4040 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
4041 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);
4043 /* Mark any unallocated hard regs as available for spills. That
4044 makes inheritance work somewhat better. */
4045 if (chain->need_reload)
4047 REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before);
4048 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after);
4049 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4051 /* Save the old value for the sanity test below. */
4052 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
4054 compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
4055 compute_use_by_pseudos (&used_by_pseudos, chain->live_after);
4056 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4057 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4059 /* Make sure we only enlarge the set. */
4060 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
4061 abort ();
4062 ok:;
4066 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4067 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4069 int regno = reg_renumber[i];
4070 if (reg_old_renumber[i] == regno)
4071 continue;
4073 alter_reg (i, reg_old_renumber[i]);
4074 reg_old_renumber[i] = regno;
4075 if (dumpfile)
4077 if (regno == -1)
4078 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
4079 else
4080 fprintf (dumpfile, " Register %d now in %d.\n\n",
4081 i, reg_renumber[i]);
4085 return something_changed;
4088 /* Find all paradoxical subregs within X and update reg_max_ref_width.
4089 Also mark any hard registers used to store user variables as
4090 forbidden from being used for spill registers. */
4092 static void
4093 scan_paradoxical_subregs (x)
4094 register rtx x;
4096 register int i;
4097 register const char *fmt;
4098 register enum rtx_code code = GET_CODE (x);
4100 switch (code)
4102 case REG:
4103 #if 0
4104 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
4105 && REG_USERVAR_P (x))
4106 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
4107 #endif
4108 return;
4110 case CONST_INT:
4111 case CONST:
4112 case SYMBOL_REF:
4113 case LABEL_REF:
4114 case CONST_DOUBLE:
4115 case CC0:
4116 case PC:
4117 case USE:
4118 case CLOBBER:
4119 return;
4121 case SUBREG:
4122 if (GET_CODE (SUBREG_REG (x)) == REG
4123 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4124 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4125 = GET_MODE_SIZE (GET_MODE (x));
4126 return;
4128 default:
4129 break;
4132 fmt = GET_RTX_FORMAT (code);
4133 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4135 if (fmt[i] == 'e')
4136 scan_paradoxical_subregs (XEXP (x, i));
4137 else if (fmt[i] == 'E')
4139 register int j;
4140 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4141 scan_paradoxical_subregs (XVECEXP (x, i, j));
4146 static int
4147 hard_reg_use_compare (p1p, p2p)
4148 const PTR p1p;
4149 const PTR p2p;
4151 const struct hard_reg_n_uses *p1 = (const struct hard_reg_n_uses *)p1p;
4152 const struct hard_reg_n_uses *p2 = (const struct hard_reg_n_uses *)p2p;
4153 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno);
4154 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno);
4155 if (bad1 && bad2)
4156 return p1->regno - p2->regno;
4157 if (bad1)
4158 return 1;
4159 if (bad2)
4160 return -1;
4161 if (p1->uses > p2->uses)
4162 return 1;
4163 if (p1->uses < p2->uses)
4164 return -1;
4165 /* If regs are equally good, sort by regno,
4166 so that the results of qsort leave nothing to chance. */
4167 return p1->regno - p2->regno;
4170 /* Used for communication between order_regs_for_reload and count_pseudo.
4171 Used to avoid counting one pseudo twice. */
4172 static regset pseudos_counted;
4174 /* Update the costs in N_USES, considering that pseudo REG is live. */
4175 static void
4176 count_pseudo (n_uses, reg)
4177 struct hard_reg_n_uses *n_uses;
4178 int reg;
4180 int r = reg_renumber[reg];
4181 int nregs;
4183 if (REGNO_REG_SET_P (pseudos_counted, reg))
4184 return;
4185 SET_REGNO_REG_SET (pseudos_counted, reg);
4187 if (r < 0)
4188 abort ();
4190 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
4191 while (nregs-- > 0)
4192 n_uses[r++].uses += REG_N_REFS (reg);
4194 /* Choose the order to consider regs for use as reload registers
4195 based on how much trouble would be caused by spilling one.
4196 Store them in order of decreasing preference in potential_reload_regs. */
4198 static void
4199 order_regs_for_reload (chain)
4200 struct insn_chain *chain;
4202 register int i;
4203 register int o = 0;
4204 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
4206 pseudos_counted = ALLOCA_REG_SET ();
4208 COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);
4210 /* Count number of uses of each hard reg by pseudo regs allocated to it
4211 and then order them by decreasing use. */
4213 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4215 hard_reg_n_uses[i].regno = i;
4216 hard_reg_n_uses[i].uses = 0;
4218 /* Test the various reasons why we can't use a register for
4219 spilling in this insn. */
4220 if (fixed_regs[i]
4221 || REGNO_REG_SET_P (chain->live_before, i)
4222 || REGNO_REG_SET_P (chain->live_after, i))
4223 SET_HARD_REG_BIT (bad_spill_regs, i);
4226 /* Now compute hard_reg_n_uses. */
4227 CLEAR_REG_SET (pseudos_counted);
4229 EXECUTE_IF_SET_IN_REG_SET
4230 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
4232 count_pseudo (hard_reg_n_uses, i);
4234 EXECUTE_IF_SET_IN_REG_SET
4235 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
4237 count_pseudo (hard_reg_n_uses, i);
4240 FREE_REG_SET (pseudos_counted);
4242 /* Prefer registers not so far used, for use in temporary loading.
4243 Among them, if REG_ALLOC_ORDER is defined, use that order.
4244 Otherwise, prefer registers not preserved by calls. */
4246 #ifdef REG_ALLOC_ORDER
4247 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4249 int regno = reg_alloc_order[i];
4251 if (hard_reg_n_uses[regno].uses == 0
4252 && ! TEST_HARD_REG_BIT (bad_spill_regs, regno))
4253 potential_reload_regs[o++] = regno;
4255 #else
4256 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4258 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i]
4259 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
4260 potential_reload_regs[o++] = i;
4262 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4264 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i]
4265 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
4266 potential_reload_regs[o++] = i;
4268 #endif
4270 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
4271 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
4273 /* Now add the regs that are already used,
4274 preferring those used less often. The fixed and otherwise forbidden
4275 registers will be at the end of this list. */
4277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4278 if (hard_reg_n_uses[i].uses != 0
4279 && ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4280 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4281 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4282 if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4283 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4286 /* Reload pseudo-registers into hard regs around each insn as needed.
4287 Additional register load insns are output before the insn that needs them
4288 and perhaps store insns after insns that modify the reloaded pseudo reg.
4290 reg_last_reload_reg and reg_reloaded_contents keep track of
4291 which registers are already available in reload registers.
4292 We update these for the reloads that we perform,
4293 as the insns are scanned. */
4295 static void
4296 reload_as_needed (live_known)
4297 int live_known;
4299 struct insn_chain *chain;
4300 #if defined (AUTO_INC_DEC) || defined (INSN_CLOBBERS_REGNO_P)
4301 register int i;
4302 #endif
4303 rtx x;
4305 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
4306 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
4307 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4308 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
4309 reg_has_output_reload = (char *) alloca (max_regno);
4310 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4312 set_initial_elim_offsets ();
4314 for (chain = reload_insn_chain; chain; chain = chain->next)
4316 rtx prev;
4317 rtx insn = chain->insn;
4318 rtx old_next = NEXT_INSN (insn);
4320 /* If we pass a label, copy the offsets from the label information
4321 into the current offsets of each elimination. */
4322 if (GET_CODE (insn) == CODE_LABEL)
4323 set_offsets_for_label (insn);
4325 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4327 rtx oldpat = PATTERN (insn);
4329 /* If this is a USE or CLOBBER of a MEM, ensure that any
4330 references to eliminable registers have been removed. */
4332 if ((GET_CODE (PATTERN (insn)) == USE
4333 || GET_CODE (PATTERN (insn)) == CLOBBER)
4334 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4335 XEXP (XEXP (PATTERN (insn), 0), 0)
4336 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4337 GET_MODE (XEXP (PATTERN (insn), 0)),
4338 NULL_RTX);
4340 /* If we need to do register elimination processing, do so.
4341 This might delete the insn, in which case we are done. */
4342 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4344 eliminate_regs_in_insn (insn, 1);
4345 if (GET_CODE (insn) == NOTE)
4347 update_eliminable_offsets ();
4348 continue;
4352 /* If need_elim is nonzero but need_reload is zero, one might think
4353 that we could simply set n_reloads to 0. However, find_reloads
4354 could have done some manipulation of the insn (such as swapping
4355 commutative operands), and these manipulations are lost during
4356 the first pass for every insn that needs register elimination.
4357 So the actions of find_reloads must be redone here. */
4359 if (! chain->need_elim && ! chain->need_reload
4360 && ! chain->need_operand_change)
4361 n_reloads = 0;
4362 /* First find the pseudo regs that must be reloaded for this insn.
4363 This info is returned in the tables reload_... (see reload.h).
4364 Also modify the body of INSN by substituting RELOAD
4365 rtx's for those pseudo regs. */
4366 else
4368 bzero (reg_has_output_reload, max_regno);
4369 CLEAR_HARD_REG_SET (reg_is_output_reload);
4371 find_reloads (insn, 1, spill_indirect_levels, live_known,
4372 spill_reg_order);
4375 if (num_eliminable && chain->need_elim)
4376 update_eliminable_offsets ();
4378 if (n_reloads > 0)
4380 rtx next = NEXT_INSN (insn);
4381 rtx p;
4383 prev = PREV_INSN (insn);
4385 /* Now compute which reload regs to reload them into. Perhaps
4386 reusing reload regs from previous insns, or else output
4387 load insns to reload them. Maybe output store insns too.
4388 Record the choices of reload reg in reload_reg_rtx. */
4389 choose_reload_regs (chain);
4391 /* Merge any reloads that we didn't combine for fear of
4392 increasing the number of spill registers needed but now
4393 discover can be safely merged. */
4394 if (SMALL_REGISTER_CLASSES)
4395 merge_assigned_reloads (insn);
4397 /* Generate the insns to reload operands into or out of
4398 their reload regs. */
4399 emit_reload_insns (chain);
4401 /* Substitute the chosen reload regs from reload_reg_rtx
4402 into the insn's body (or perhaps into the bodies of other
4403 load and store insns that we just made for reloading
4404 and that we moved the structure into). */
4405 subst_reloads ();
4407 /* If this was an ASM, make sure that all the reload insns
4408 we have generated are valid. If not, give an error
4409 and delete them. */
4411 if (asm_noperands (PATTERN (insn)) >= 0)
4412 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4413 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4414 && (recog_memoized (p) < 0
4415 || (extract_insn (p), ! constrain_operands (1))))
4417 error_for_asm (insn,
4418 "`asm' operand requires impossible reload");
4419 PUT_CODE (p, NOTE);
4420 NOTE_SOURCE_FILE (p) = 0;
4421 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4424 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4425 is no longer validly lying around to save a future reload.
4426 Note that this does not detect pseudos that were reloaded
4427 for this insn in order to be stored into it
4428 (obeying register constraints). That is correct; such reload
4429 registers ARE still valid. */
4430 note_stores (oldpat, forget_old_reloads_1, NULL);
4432 /* There may have been CLOBBER insns placed after INSN. So scan
4433 between INSN and NEXT and use them to forget old reloads. */
4434 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4435 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4436 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4438 #ifdef AUTO_INC_DEC
4439 /* Likewise for regs altered by auto-increment in this insn.
4440 REG_INC notes have been changed by reloading:
4441 find_reloads_address_1 records substitutions for them,
4442 which have been performed by subst_reloads above. */
4443 for (i = n_reloads - 1; i >= 0; i--)
4445 rtx in_reg = rld[i].in_reg;
4446 if (in_reg)
4448 enum rtx_code code = GET_CODE (in_reg);
4449 /* PRE_INC / PRE_DEC will have the reload register ending up
4450 with the same value as the stack slot, but that doesn't
4451 hold true for POST_INC / POST_DEC. Either we have to
4452 convert the memory access to a true POST_INC / POST_DEC,
4453 or we can't use the reload register for inheritance. */
4454 if ((code == POST_INC || code == POST_DEC)
4455 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4456 REGNO (rld[i].reg_rtx))
4457 /* Make sure it is the inc/dec pseudo, and not
4458 some other (e.g. output operand) pseudo. */
4459 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4460 == REGNO (XEXP (in_reg, 0))))
4463 rtx reload_reg = rld[i].reg_rtx;
4464 enum machine_mode mode = GET_MODE (reload_reg);
4465 int n = 0;
4466 rtx p;
4468 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4470 /* We really want to ignore REG_INC notes here, so
4471 use PATTERN (p) as argument to reg_set_p. */
4472 if (reg_set_p (reload_reg, PATTERN (p)))
4473 break;
4474 n = count_occurrences (PATTERN (p), reload_reg);
4475 if (! n)
4476 continue;
4477 if (n == 1)
4479 n = validate_replace_rtx (reload_reg,
4480 gen_rtx (code, mode,
4481 reload_reg),
4482 p);
4484 /* We must also verify that the constraints
4485 are met after the replacement. */
4486 extract_insn (p);
4487 if (n)
4488 n = constrain_operands (1);
4489 else
4490 break;
4492 /* If the constraints were not met, then
4493 undo the replacement. */
4494 if (!n)
4496 validate_replace_rtx (gen_rtx (code, mode,
4497 reload_reg),
4498 reload_reg, p);
4499 break;
4503 break;
4505 if (n == 1)
4507 REG_NOTES (p)
4508 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4509 REG_NOTES (p));
4510 /* Mark this as having an output reload so that the
4511 REG_INC processing code below won't invalidate
4512 the reload for inheritance. */
4513 SET_HARD_REG_BIT (reg_is_output_reload,
4514 REGNO (reload_reg));
4515 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4517 else
4518 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4519 NULL);
4521 else if ((code == PRE_INC || code == PRE_DEC)
4522 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4523 REGNO (rld[i].reg_rtx))
4524 /* Make sure it is the inc/dec pseudo, and not
4525 some other (e.g. output operand) pseudo. */
4526 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4527 == REGNO (XEXP (in_reg, 0))))
4529 SET_HARD_REG_BIT (reg_is_output_reload,
4530 REGNO (rld[i].reg_rtx));
4531 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4535 /* If a pseudo that got a hard register is auto-incremented,
4536 we must purge records of copying it into pseudos without
4537 hard registers. */
4538 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4539 if (REG_NOTE_KIND (x) == REG_INC)
4541 /* See if this pseudo reg was reloaded in this insn.
4542 If so, its last-reload info is still valid
4543 because it is based on this insn's reload. */
4544 for (i = 0; i < n_reloads; i++)
4545 if (rld[i].out == XEXP (x, 0))
4546 break;
4548 if (i == n_reloads)
4549 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4551 #endif
4553 /* A reload reg's contents are unknown after a label. */
4554 if (GET_CODE (insn) == CODE_LABEL)
4555 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4557 /* Don't assume a reload reg is still good after a call insn
4558 if it is a call-used reg. */
4559 else if (GET_CODE (insn) == CALL_INSN)
4560 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4562 /* In case registers overlap, allow certain insns to invalidate
4563 particular hard registers. */
4565 #ifdef INSN_CLOBBERS_REGNO_P
4566 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4567 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4568 && INSN_CLOBBERS_REGNO_P (insn, i))
4569 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
4570 #endif
4572 #ifdef USE_C_ALLOCA
4573 alloca (0);
4574 #endif
4578 /* Discard all record of any value reloaded from X,
4579 or reloaded in X from someplace else;
4580 unless X is an output reload reg of the current insn.
4582 X may be a hard reg (the reload reg)
4583 or it may be a pseudo reg that was reloaded from. */
4585 static void
4586 forget_old_reloads_1 (x, ignored, data)
4587 rtx x;
4588 rtx ignored ATTRIBUTE_UNUSED;
4589 void *data ATTRIBUTE_UNUSED;
4591 register int regno;
4592 int nr;
4593 int offset = 0;
4595 /* note_stores does give us subregs of hard regs. */
4596 while (GET_CODE (x) == SUBREG)
4598 offset += SUBREG_WORD (x);
4599 x = SUBREG_REG (x);
4602 if (GET_CODE (x) != REG)
4603 return;
4605 regno = REGNO (x) + offset;
4607 if (regno >= FIRST_PSEUDO_REGISTER)
4608 nr = 1;
4609 else
4611 int i;
4612 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4613 /* Storing into a spilled-reg invalidates its contents.
4614 This can happen if a block-local pseudo is allocated to that reg
4615 and it wasn't spilled because this block's total need is 0.
4616 Then some insn might have an optional reload and use this reg. */
4617 for (i = 0; i < nr; i++)
4618 /* But don't do this if the reg actually serves as an output
4619 reload reg in the current instruction. */
4620 if (n_reloads == 0
4621 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4622 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4625 /* Since value of X has changed,
4626 forget any value previously copied from it. */
4628 while (nr-- > 0)
4629 /* But don't forget a copy if this is the output reload
4630 that establishes the copy's validity. */
4631 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4632 reg_last_reload_reg[regno + nr] = 0;
4635 /* Comparison function for qsort to decide which of two reloads
4636 should be handled first. *P1 and *P2 are the reload numbers. */
4638 static int
4639 reload_reg_class_lower (r1p, r2p)
4640 const PTR r1p;
4641 const PTR r2p;
4643 register int r1 = *(const short *)r1p, r2 = *(const short *)r2p;
4644 register int t;
4646 /* Consider required reloads before optional ones. */
4647 t = rld[r1].optional - rld[r2].optional;
4648 if (t != 0)
4649 return t;
4651 /* Count all solitary classes before non-solitary ones. */
4652 t = ((reg_class_size[(int) rld[r2].class] == 1)
4653 - (reg_class_size[(int) rld[r1].class] == 1));
4654 if (t != 0)
4655 return t;
4657 /* Aside from solitaires, consider all multi-reg groups first. */
4658 t = rld[r2].nregs - rld[r1].nregs;
4659 if (t != 0)
4660 return t;
4662 /* Consider reloads in order of increasing reg-class number. */
4663 t = (int) rld[r1].class - (int) rld[r2].class;
4664 if (t != 0)
4665 return t;
4667 /* If reloads are equally urgent, sort by reload number,
4668 so that the results of qsort leave nothing to chance. */
4669 return r1 - r2;
4672 /* The following HARD_REG_SETs indicate when each hard register is
4673 used for a reload of various parts of the current insn. */
4675 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4676 static HARD_REG_SET reload_reg_used;
4677 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4678 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4679 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4680 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4681 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4682 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4683 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4684 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4685 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4686 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4687 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4688 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4689 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4690 static HARD_REG_SET reload_reg_used_in_op_addr;
4691 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4692 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4693 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4694 static HARD_REG_SET reload_reg_used_in_insn;
4695 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4696 static HARD_REG_SET reload_reg_used_in_other_addr;
4698 /* If reg is in use as a reload reg for any sort of reload. */
4699 static HARD_REG_SET reload_reg_used_at_all;
4701 /* If reg is in use as an inherited reload; we just mark the first register
4702 in the group. */
4703 static HARD_REG_SET reload_reg_used_for_inherit;
4705 /* Records which hard regs are used in any way, either as explicit use or
4706 by being allocated to a pseudo during any point of the current insn. */
4707 static HARD_REG_SET reg_used_in_insn;
4709 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4710 TYPE. MODE is used to indicate how many consecutive regs are
4711 actually used. */
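/* As a sketch (sizes are target-dependent): reloading a value whose mode
needs two hard registers, starting at hard reg 4, makes HARD_REGNO_NREGS
return 2 below, so both regs 4 and 5 are recorded as in use for the
given reload type. */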
4713 static void
4714 mark_reload_reg_in_use (regno, opnum, type, mode)
4715 int regno;
4716 int opnum;
4717 enum reload_type type;
4718 enum machine_mode mode;
4720 int nregs = HARD_REGNO_NREGS (regno, mode);
4721 int i;
4723 for (i = regno; i < nregs + regno; i++)
4725 switch (type)
4727 case RELOAD_OTHER:
4728 SET_HARD_REG_BIT (reload_reg_used, i);
4729 break;
4731 case RELOAD_FOR_INPUT_ADDRESS:
4732 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4733 break;
4735 case RELOAD_FOR_INPADDR_ADDRESS:
4736 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4737 break;
4739 case RELOAD_FOR_OUTPUT_ADDRESS:
4740 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4741 break;
4743 case RELOAD_FOR_OUTADDR_ADDRESS:
4744 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4745 break;
4747 case RELOAD_FOR_OPERAND_ADDRESS:
4748 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4749 break;
4751 case RELOAD_FOR_OPADDR_ADDR:
4752 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4753 break;
4755 case RELOAD_FOR_OTHER_ADDRESS:
4756 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4757 break;
4759 case RELOAD_FOR_INPUT:
4760 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4761 break;
4763 case RELOAD_FOR_OUTPUT:
4764 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4765 break;
4767 case RELOAD_FOR_INSN:
4768 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4769 break;
4772 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4776 /* Similarly, but show REGNO is no longer in use for a reload. */
4778 static void
4779 clear_reload_reg_in_use (regno, opnum, type, mode)
4780 int regno;
4781 int opnum;
4782 enum reload_type type;
4783 enum machine_mode mode;
4785 int nregs = HARD_REGNO_NREGS (regno, mode);
4786 int start_regno, end_regno;
4787 int i;
4788 /* A complication is that for some reload types, inheritance might
4789 allow multiple reloads of the same types to share a reload register.
4790 We set check_opnum if we have to check only reloads with the same
4791 operand number, and check_any if we have to check all reloads. */
4792 int check_opnum = 0;
4793 int check_any = 0;
4794 HARD_REG_SET *used_in_set;
4796 switch (type)
4798 case RELOAD_OTHER:
4799 used_in_set = &reload_reg_used;
4800 break;
4802 case RELOAD_FOR_INPUT_ADDRESS:
4803 used_in_set = &reload_reg_used_in_input_addr[opnum];
4804 break;
4806 case RELOAD_FOR_INPADDR_ADDRESS:
4807 check_opnum = 1;
4808 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4809 break;
4811 case RELOAD_FOR_OUTPUT_ADDRESS:
4812 used_in_set = &reload_reg_used_in_output_addr[opnum];
4813 break;
4815 case RELOAD_FOR_OUTADDR_ADDRESS:
4816 check_opnum = 1;
4817 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4818 break;
4820 case RELOAD_FOR_OPERAND_ADDRESS:
4821 used_in_set = &reload_reg_used_in_op_addr;
4822 break;
4824 case RELOAD_FOR_OPADDR_ADDR:
4825 check_any = 1;
4826 used_in_set = &reload_reg_used_in_op_addr_reload;
4827 break;
4829 case RELOAD_FOR_OTHER_ADDRESS:
4830 used_in_set = &reload_reg_used_in_other_addr;
4831 check_any = 1;
4832 break;
4834 case RELOAD_FOR_INPUT:
4835 used_in_set = &reload_reg_used_in_input[opnum];
4836 break;
4838 case RELOAD_FOR_OUTPUT:
4839 used_in_set = &reload_reg_used_in_output[opnum];
4840 break;
4842 case RELOAD_FOR_INSN:
4843 used_in_set = &reload_reg_used_in_insn;
4844 break;
4845 default:
4846 abort ();
4848 /* We resolve conflicts with remaining reloads of the same type by
4849 excluding the intervals of reload registers used by them from the
4850 interval of freed reload registers. Since we only keep track of
4851 one set of interval bounds, we might have to exclude somewhat
4852 more than what would be necessary if we used a HARD_REG_SET here.
4853 But this should only happen very infrequently, so there should
4854 be no reason to worry about it. */
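/* A worked example of that over-exclusion (register numbers illustrative
only): suppose we are freeing regs 4..7 while another reload of the same
type still occupies regs 5..6. The conflict start (5) lies inside the
interval, so END_REGNO is pulled back to 5 and only reg 4 is cleared;
reg 7 stays marked even though it is really free, which is the
conservative behavior described above. */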
4856 start_regno = regno;
4857 end_regno = regno + nregs;
4858 if (check_opnum || check_any)
4860 for (i = n_reloads - 1; i >= 0; i--)
4862 if (rld[i].when_needed == type
4863 && (check_any || rld[i].opnum == opnum)
4864 && rld[i].reg_rtx)
4866 int conflict_start = true_regnum (rld[i].reg_rtx);
4867 int conflict_end
4868 = (conflict_start
4869 + HARD_REGNO_NREGS (conflict_start, rld[i].mode));
4871 /* If there is an overlap with the first to-be-freed register,
4872 adjust the interval start. */
4873 if (conflict_start <= start_regno && conflict_end > start_regno)
4874 start_regno = conflict_end;
4875 /* Otherwise, if there is a conflict with one of the other
4876 to-be-freed registers, adjust the interval end. */
4877 if (conflict_start > start_regno && conflict_start < end_regno)
4878 end_regno = conflict_start;
4882 for (i = start_regno; i < end_regno; i++)
4883 CLEAR_HARD_REG_BIT (*used_in_set, i);
4886 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4887 specified by OPNUM and TYPE. */
4889 static int
4890 reload_reg_free_p (regno, opnum, type)
4891 int regno;
4892 int opnum;
4893 enum reload_type type;
4895 int i;
4897 /* In use for a RELOAD_OTHER means it's not available for anything. */
4898 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4899 return 0;
4901 switch (type)
4903 case RELOAD_OTHER:
4904 /* In use for anything means we can't use it for RELOAD_OTHER. */
4905 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4906 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4907 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4908 return 0;
4910 for (i = 0; i < reload_n_operands; i++)
4911 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4912 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4913 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4914 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4915 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4916 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4917 return 0;
4919 return 1;
4921 case RELOAD_FOR_INPUT:
4922 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4923 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4924 return 0;
4926 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4927 return 0;
4929 /* If it is used for some other input, can't use it. */
4930 for (i = 0; i < reload_n_operands; i++)
4931 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4932 return 0;
4934 /* If it is used in a later operand's address, can't use it. */
4935 for (i = opnum + 1; i < reload_n_operands; i++)
4936 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4937 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4938 return 0;
4940 return 1;
4942 case RELOAD_FOR_INPUT_ADDRESS:
4943 /* Can't use a register if it is used for an input address for this
4944 operand or used as an input in an earlier one. */
4945 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4946 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4947 return 0;
4949 for (i = 0; i < opnum; i++)
4950 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4951 return 0;
4953 return 1;
4955 case RELOAD_FOR_INPADDR_ADDRESS:
4956 /* Can't use a register if it is used for an input address
4957 for this operand or used as an input in an earlier
4958 one. */
4959 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4960 return 0;
4962 for (i = 0; i < opnum; i++)
4963 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4964 return 0;
4966 return 1;
4968 case RELOAD_FOR_OUTPUT_ADDRESS:
4969 /* Can't use a register if it is used for an output address for this
4970 operand or used as an output in this or a later operand. */
4971 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4972 return 0;
4974 for (i = opnum; i < reload_n_operands; i++)
4975 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4976 return 0;
4978 return 1;
4980 case RELOAD_FOR_OUTADDR_ADDRESS:
4981 /* Can't use a register if it is used for an output address
4982 for this operand or used as an output in this or a
4983 later operand. */
4984 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4985 return 0;
4987 for (i = opnum; i < reload_n_operands; i++)
4988 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4989 return 0;
4991 return 1;
4993 case RELOAD_FOR_OPERAND_ADDRESS:
4994 for (i = 0; i < reload_n_operands; i++)
4995 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4996 return 0;
4998 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4999 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5001 case RELOAD_FOR_OPADDR_ADDR:
5002 for (i = 0; i < reload_n_operands; i++)
5003 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5004 return 0;
5006 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5008 case RELOAD_FOR_OUTPUT:
5009 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5010 outputs, or an operand address for this or an earlier output. */
5011 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5012 return 0;
5014 for (i = 0; i < reload_n_operands; i++)
5015 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5016 return 0;
5018 for (i = 0; i <= opnum; i++)
5019 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5020 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5021 return 0;
5023 return 1;
5025 case RELOAD_FOR_INSN:
5026 for (i = 0; i < reload_n_operands; i++)
5027 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5028 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5029 return 0;
5031 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5032 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5034 case RELOAD_FOR_OTHER_ADDRESS:
5035 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5037 abort ();
5040 /* Return 1 if the value in reload reg REGNO, as used by a reload
5041 needed for the part of the insn specified by OPNUM and TYPE,
5042 is still available in REGNO at the end of the insn.
5044 We can assume that the reload reg was already tested for availability
5045 at the time it is needed, and we should not check this again,
5046 in case the reg has already been marked in use. */
5048 static int
5049 reload_reg_reaches_end_p (regno, opnum, type)
5050 int regno;
5051 int opnum;
5052 enum reload_type type;
5054 int i;
5056 switch (type)
5058 case RELOAD_OTHER:
5059 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5060 its value must reach the end. */
5061 return 1;
5063 /* If this use is for part of the insn,
5064 its value reaches the end if no subsequent part uses the same register.
5065 Just like the above function, don't try to do this with lots
5066 of fallthroughs. */
5068 case RELOAD_FOR_OTHER_ADDRESS:
5069 /* Here we check for everything else, since these don't conflict
5070 with anything else and everything comes later. */
5072 for (i = 0; i < reload_n_operands; i++)
5073 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5074 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5075 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5076 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5077 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5078 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5079 return 0;
5081 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5082 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5083 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5085 case RELOAD_FOR_INPUT_ADDRESS:
5086 case RELOAD_FOR_INPADDR_ADDRESS:
5087 /* Similar, except that we check only for this and subsequent inputs
5088 and the address of only subsequent inputs and we do not need
5089 to check for RELOAD_OTHER objects since they are known not to
5090 conflict. */
5092 for (i = opnum; i < reload_n_operands; i++)
5093 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5094 return 0;
5096 for (i = opnum + 1; i < reload_n_operands; i++)
5097 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5098 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5099 return 0;
5101 for (i = 0; i < reload_n_operands; i++)
5102 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5103 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5104 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5105 return 0;
5107 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5108 return 0;
5110 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5111 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5113 case RELOAD_FOR_INPUT:
5114 /* Similar to input address, except we start at the next operand for
5115 both input and input address and we do not check for
5116 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5117 would conflict. */
5119 for (i = opnum + 1; i < reload_n_operands; i++)
5120 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5121 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5122 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5123 return 0;
5125 /* ... fall through ... */
5127 case RELOAD_FOR_OPERAND_ADDRESS:
5128 /* Check outputs and their addresses. */
5130 for (i = 0; i < reload_n_operands; i++)
5131 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5132 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5133 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5134 return 0;
5136 return 1;
5138 case RELOAD_FOR_OPADDR_ADDR:
5139 for (i = 0; i < reload_n_operands; i++)
5140 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5141 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5142 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5143 return 0;
5145 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5146 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5148 case RELOAD_FOR_INSN:
5149 /* These conflict with other outputs with RELOAD_OTHER. So
5150 we need only check for output addresses. */
5152 opnum = -1;
5154 /* ... fall through ... */
5156 case RELOAD_FOR_OUTPUT:
5157 case RELOAD_FOR_OUTPUT_ADDRESS:
5158 case RELOAD_FOR_OUTADDR_ADDRESS:
5159 /* We already know these can't conflict with a later output. So the
5160 only things to check are later output addresses. */
5161 for (i = opnum + 1; i < reload_n_operands; i++)
5162 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5163 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5164 return 0;
5166 return 1;
5169 abort ();
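/* A worked reading of the case analysis above (the register number and
   operand are made up for illustration): an input-address reload for
   operand 1 that uses hard reg 3 reaches the end of the insn only if
   reg 3 is not also claimed by an input reload for operand 1 or later,
   by an address reload for a later input, by any output or output
   address reload, by an operand-address reload, or by the insn body
   itself.  */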
5172 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5173 Return 0 otherwise.
5175 This function uses the same algorithm as reload_reg_free_p above. */
5178 reloads_conflict (r1, r2)
5179 int r1, r2;
5181 enum reload_type r1_type = rld[r1].when_needed;
5182 enum reload_type r2_type = rld[r2].when_needed;
5183 int r1_opnum = rld[r1].opnum;
5184 int r2_opnum = rld[r2].opnum;
5186 /* RELOAD_OTHER conflicts with everything. */
5187 if (r2_type == RELOAD_OTHER)
5188 return 1;
5190 /* Otherwise, check conflicts differently for each type. */
5192 switch (r1_type)
5194 case RELOAD_FOR_INPUT:
5195 return (r2_type == RELOAD_FOR_INSN
5196 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5197 || r2_type == RELOAD_FOR_OPADDR_ADDR
5198 || r2_type == RELOAD_FOR_INPUT
5199 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5200 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5201 && r2_opnum > r1_opnum));
5203 case RELOAD_FOR_INPUT_ADDRESS:
5204 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5205 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5207 case RELOAD_FOR_INPADDR_ADDRESS:
5208 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5209 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5211 case RELOAD_FOR_OUTPUT_ADDRESS:
5212 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5213 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5215 case RELOAD_FOR_OUTADDR_ADDRESS:
5216 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5217 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5219 case RELOAD_FOR_OPERAND_ADDRESS:
5220 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5221 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5223 case RELOAD_FOR_OPADDR_ADDR:
5224 return (r2_type == RELOAD_FOR_INPUT
5225 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5227 case RELOAD_FOR_OUTPUT:
5228 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5229 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5230 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5231 && r2_opnum >= r1_opnum));
5233 case RELOAD_FOR_INSN:
5234 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5235 || r2_type == RELOAD_FOR_INSN
5236 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5238 case RELOAD_FOR_OTHER_ADDRESS:
5239 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5241 case RELOAD_OTHER:
5242 return 1;
5244 default:
5245 abort ();
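#if 0
/* An illustrative sketch only (reload slots 0 and 1 are hypothetical):
   with the two reloads set up as below, reloads_conflict (0, 1) is 1,
   because operand 0 is still live in its reload register while the
   address for the later operand 1 is being loaded.  With rld[1].opnum
   set to 0 instead, the address register is dead by the time operand 0
   itself is loaded, so the two can share a register and the call
   returns 0.  */
rld[0].when_needed = RELOAD_FOR_INPUT;
rld[0].opnum = 0;
rld[1].when_needed = RELOAD_FOR_INPUT_ADDRESS;
rld[1].opnum = 1;
#endif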
5249 /* Vector of reload-numbers showing the order in which the reloads should
5250 be processed. */
5251 short reload_order[MAX_RELOADS];
5253 /* Indexed by reload number, 1 if incoming value
5254 inherited from previous insns. */
5255 char reload_inherited[MAX_RELOADS];
5257 /* For an inherited reload, this is the insn the reload was inherited from,
5258 if we know it. Otherwise, this is 0. */
5259 rtx reload_inheritance_insn[MAX_RELOADS];
5261 /* If non-zero, this is a place to get the value of the reload,
5262 rather than using reload_in. */
5263 rtx reload_override_in[MAX_RELOADS];
5265 /* For each reload, the hard register number of the register used,
5266 or -1 if we did not need a register for this reload. */
5267 int reload_spill_index[MAX_RELOADS];
5269 /* Return 1 if the value in reload reg REGNO, as used by a reload
5270 needed for the part of the insn specified by OPNUM and TYPE,
5271 may be used to load VALUE into it.
5273 Other read-only reloads with the same value do not conflict
5274 unless OUT is non-zero and these other reloads have to live while
5275 output reloads live.
5276 If OUT is CONST0_RTX, this is a special case: it means that the
5277 test should not be for using register REGNO as reload register, but
5278 for copying from register REGNO into the reload register.
5280 RELOADNUM is the number of the reload we want to load this value for;
5281 a reload does not conflict with itself.
5283 When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5284 reloads that load an address for the very reload we are considering.
5286 The caller has to make sure that there is no conflict with the return
5287 register. */
5288 static int
5289 reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum,
5290 ignore_address_reloads)
5291 int regno;
5292 int opnum;
5293 enum reload_type type;
5294 rtx value, out;
5295 int reloadnum;
5296 int ignore_address_reloads;
5298 int time1;
5299 /* Set if we see an input reload that must not share its reload register
5300 with any new earlyclobber, but might otherwise share the reload
5301 register with an output or input-output reload. */
5302 int check_earlyclobber = 0;
5303 int i;
5304 int copy = 0;
5306 if (out == const0_rtx)
5308 copy = 1;
5309 out = NULL_RTX;
5312 /* We use some pseudo 'time' value to check if the lifetimes of the
5313 new register use would overlap with the one of a previous reload
5314 that is not read-only or uses a different value.
5315 The 'time' used doesn't have to be linear in any shape or form, just
5316 monotonic.
5317 Some reload types use different 'buckets' for each operand.
5318 So there are MAX_RECOG_OPERANDS different time values for each
5319 such reload type.
5320 We compute TIME1 as the time when the register for the prospective
5321 new reload ceases to be live, and TIME2 for each existing
5322 reload as the time when the reload register of that reload
5323 becomes live.
5324 Where there is little to be gained by exact lifetime calculations,
5325 we just make conservative assumptions, i.e. a longer lifetime;
5326 this is done in the 'default:' cases. */
5327 switch (type)
5329 case RELOAD_FOR_OTHER_ADDRESS:
5330 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5331 time1 = copy ? 0 : 1;
5332 break;
5333 case RELOAD_OTHER:
5334 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5335 break;
5336 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5337 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5338 respectively, to the time values for these, we get distinct time
5339 values. To get distinct time values for each operand, we have to
5340 multiply opnum by at least three. We round that up to four because
5341 multiply by four is often cheaper. */
5342 case RELOAD_FOR_INPADDR_ADDRESS:
5343 time1 = opnum * 4 + 2;
5344 break;
5345 case RELOAD_FOR_INPUT_ADDRESS:
5346 time1 = opnum * 4 + 3;
5347 break;
5348 case RELOAD_FOR_INPUT:
5349 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5350 executes (inclusive). */
5351 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5352 break;
5353 case RELOAD_FOR_OPADDR_ADDR:
5354 /* opnum * 4 + 4
5355 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5356 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5357 break;
5358 case RELOAD_FOR_OPERAND_ADDRESS:
5359 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5360 is executed. */
5361 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5362 break;
5363 case RELOAD_FOR_OUTADDR_ADDRESS:
5364 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5365 break;
5366 case RELOAD_FOR_OUTPUT_ADDRESS:
5367 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5368 break;
5369 default:
5370 time1 = MAX_RECOG_OPERANDS * 5 + 5;
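/* To make the bucket layout above concrete, suppose for illustration
   that MAX_RECOG_OPERANDS were only 3 (the real value is larger).
   TIME1 would then be drawn from:

	RELOAD_FOR_OTHER_ADDRESS                  0 (copy) or  1
	RELOAD_FOR_INPADDR_ADDRESS, opnum 0..2    2,  6, 10
	RELOAD_FOR_INPUT_ADDRESS,   opnum 0..2    3,  7, 11
	RELOAD_FOR_INPUT (copy),    opnum 0..2    4,  8, 12
	RELOAD_FOR_OPADDR_ADDR                   13
	RELOAD_FOR_OPERAND_ADDRESS               14 (copy) or 15
	RELOAD_FOR_INPUT (no copy)               15
	RELOAD_FOR_OUTADDR_ADDRESS, opnum 0..2   16, 17, 18
	RELOAD_FOR_OUTPUT_ADDRESS,  opnum 0..2   17, 18, 19
	RELOAD_OTHER (no copy) and default       20

   i.e. inputs and their addresses fall before the insn proper, operand
   addresses straddle it, and output addresses come after it.  */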
5373 for (i = 0; i < n_reloads; i++)
5375 rtx reg = rld[i].reg_rtx;
5376 if (reg && GET_CODE (reg) == REG
5377 && ((unsigned) regno - true_regnum (reg)
5378 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
5379 && i != reloadnum)
5381 if (! rld[i].in || ! rtx_equal_p (rld[i].in, value)
5382 || rld[i].out || out)
5384 int time2;
5385 switch (rld[i].when_needed)
5387 case RELOAD_FOR_OTHER_ADDRESS:
5388 time2 = 0;
5389 break;
5390 case RELOAD_FOR_INPADDR_ADDRESS:
5391 /* find_reloads makes sure that a
5392 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5393 by at most one - the first -
5394 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS. If the
5395 address reload is inherited, the address address reload
5396 goes away, so we can ignore this conflict. */
5397 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5398 && ignore_address_reloads
5399 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5400 Then the address address is still needed to store
5401 back the new address. */
5402 && ! rld[reloadnum].out)
5403 continue;
5404 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5405 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5406 reloads go away. */
5407 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5408 && ignore_address_reloads
5409 /* Unless we are reloading an auto_inc expression. */
5410 && ! rld[reloadnum].out)
5411 continue;
5412 time2 = rld[i].opnum * 4 + 2;
5413 break;
5414 case RELOAD_FOR_INPUT_ADDRESS:
5415 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5416 && ignore_address_reloads
5417 && ! rld[reloadnum].out)
5418 continue;
5419 time2 = rld[i].opnum * 4 + 3;
5420 break;
5421 case RELOAD_FOR_INPUT:
5422 time2 = rld[i].opnum * 4 + 4;
5423 check_earlyclobber = 1;
5424 break;
5425 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5426 == MAX_RECOG_OPERANDS * 4 */
5427 case RELOAD_FOR_OPADDR_ADDR:
5428 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5429 && ignore_address_reloads
5430 && ! rld[reloadnum].out)
5431 continue;
5432 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5433 break;
5434 case RELOAD_FOR_OPERAND_ADDRESS:
5435 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5436 check_earlyclobber = 1;
5437 break;
5438 case RELOAD_FOR_INSN:
5439 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5440 break;
5441 case RELOAD_FOR_OUTPUT:
5442 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5443 instruction is executed. */
5444 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5445 break;
5446 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5447 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5448 value. */
5449 case RELOAD_FOR_OUTADDR_ADDRESS:
5450 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5451 && ignore_address_reloads
5452 && ! rld[reloadnum].out)
5453 continue;
5454 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5455 break;
5456 case RELOAD_FOR_OUTPUT_ADDRESS:
5457 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5458 break;
5459 case RELOAD_OTHER:
5460 /* If there is no conflict in the input part, handle this
5461 like an output reload. */
5462 if (! rld[i].in || rtx_equal_p (rld[i].in, value))
5464 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5465 /* Earlyclobbered outputs must conflict with inputs. */
5466 if (earlyclobber_operand_p (rld[i].out))
5467 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5469 break;
5471 time2 = 1;
5472 /* RELOAD_OTHER might be live beyond instruction execution,
5473 but this is not obvious when we set time2 = 1. So check
5474 here if there might be a problem with the new reload
5475 clobbering the register used by the RELOAD_OTHER. */
5476 if (out)
5477 return 0;
5478 break;
5479 default:
5480 return 0;
5482 if ((time1 >= time2
5483 && (! rld[i].in || rld[i].out
5484 || ! rtx_equal_p (rld[i].in, value)))
5485 || (out && rld[reloadnum].out_reg
5486 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5487 return 0;
5492 /* Earlyclobbered outputs must conflict with inputs. */
5493 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5494 return 0;
5496 return 1;
5499 /* Give an error message saying we failed to find a reload for INSN,
5500 and clear out reload R. */
5501 static void
5502 failed_reload (insn, r)
5503 rtx insn;
5504 int r;
5506 if (asm_noperands (PATTERN (insn)) < 0)
5507 /* It's the compiler's fault. */
5508 fatal_insn ("Could not find a spill register", insn);
5510 /* It's the user's fault; the operand's mode and constraint
5511 don't match. Disable this reload so we don't crash in final. */
5512 error_for_asm (insn,
5513 "`asm' operand constraint incompatible with operand size");
5514 rld[r].in = 0;
5515 rld[r].out = 0;
5516 rld[r].reg_rtx = 0;
5517 rld[r].optional = 1;
5518 rld[r].secondary_p = 1;
5521 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5522 for reload R. If it's valid, get an rtx for it. Return nonzero if
5523 successful. */
5524 static int
5525 set_reload_reg (i, r)
5526 int i, r;
5528 int regno;
5529 rtx reg = spill_reg_rtx[i];
5531 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5532 spill_reg_rtx[i] = reg
5533 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5535 regno = true_regnum (reg);
5537 /* Detect when the reload reg can't hold the reload mode.
5538 This used to be one `if', but the Sequent compiler can't handle that. */
5539 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5541 enum machine_mode test_mode = VOIDmode;
5542 if (rld[r].in)
5543 test_mode = GET_MODE (rld[r].in);
5544 /* If rld[r].in has VOIDmode, it means we will load it
5545 in whatever mode the reload reg has: to wit, rld[r].mode.
5546 We have already tested that for validity. */
5547 /* Aside from that, we need to test that the expressions
5548 to reload from or into have modes which are valid for this
5549 reload register. Otherwise the reload insns would be invalid. */
5550 if (! (rld[r].in != 0 && test_mode != VOIDmode
5551 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5552 if (! (rld[r].out != 0
5553 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5555 /* The reg is OK. */
5556 last_spill_reg = i;
5558 /* Mark as in use for this insn the reload regs we use
5559 for this. */
5560 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5561 rld[r].when_needed, rld[r].mode);
5563 rld[r].reg_rtx = reg;
5564 reload_spill_index[r] = spill_regs[i];
5565 return 1;
5568 return 0;
5571 /* Find a spill register to use as a reload register for reload R.
5572 LAST_RELOAD is non-zero if this is the last reload for the insn being
5573 processed.
5575 Set rld[R].reg_rtx to the register allocated.
5577 If NOERROR is nonzero, we return 1 if successful,
5578 or 0 if we couldn't find a spill reg and we didn't change anything. */
5580 static int
5581 allocate_reload_reg (chain, r, last_reload, noerror)
5582 struct insn_chain *chain;
5583 int r;
5584 int last_reload;
5585 int noerror;
5587 rtx insn = chain->insn;
5588 int i, pass, count;
5590 /* If we put this reload ahead, thinking it is a group,
5591 then insist on finding a group. Otherwise we can grab a
5592 reg that some other reload needs.
5593 (That can happen when we have a 68000 DATA_OR_FP_REG
5594 which is a group of data regs or one fp reg.)
5595 We need not be so restrictive if there are no more reloads
5596 for this insn.
5598 ??? Really it would be nicer to have smarter handling
5599 for that kind of reg class, where a problem like this is normal.
5600 Perhaps those classes should be avoided for reloading
5601 by use of more alternatives. */
5603 int force_group = rld[r].nregs > 1 && ! last_reload;
5605 /* If we want a single register and haven't yet found one,
5606 take any reg in the right class and not in use.
5607 If we want a consecutive group, here is where we look for it.
5609 We use two passes so we can first look for reload regs to
5610 reuse, which are already in use for other reloads in this insn,
5611 and only then use additional registers.
5612 I think that maximizing reuse is needed to make sure we don't
5613 run out of reload regs. Suppose we have three reloads, and
5614 reloads A and B can share regs. These need two regs.
5615 Suppose A and B are given different regs.
5616 That leaves none for C. */
5617 for (pass = 0; pass < 2; pass++)
5619 /* I is the index in spill_regs.
5620 We advance it round-robin between insns to use all spill regs
5621 equally, so that inherited reloads have a chance
5622 of leapfrogging each other. Don't do this, however, when we have
5623 group needs and failure would be fatal; if we only have a relatively
5624 small number of spill registers, and more than one of them has
5625 group needs, then by starting in the middle, we may end up
5626 allocating the first one in such a way that we are not left with
5627 sufficient groups to handle the rest. */
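/* Concretely (numbers made up for illustration): if n_spills is 4 and
   the previous insn left last_spill_reg at 2, the loop below tries
   spill_regs[3], spill_regs[0], spill_regs[1] and spill_regs[2], in
   that order.  */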
5629 if (noerror || ! force_group)
5630 i = last_spill_reg;
5631 else
5632 i = -1;
5634 for (count = 0; count < n_spills; count++)
5636 int class = (int) rld[r].class;
5637 int regnum;
5639 i++;
5640 if (i >= n_spills)
5641 i -= n_spills;
5642 regnum = spill_regs[i];
5644 if ((reload_reg_free_p (regnum, rld[r].opnum,
5645 rld[r].when_needed)
5646 || (rld[r].in
5647 /* We check reload_reg_used to make sure we
5648 don't clobber the return register. */
5649 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5650 && reload_reg_free_for_value_p (regnum,
5651 rld[r].opnum,
5652 rld[r].when_needed,
5653 rld[r].in,
5654 rld[r].out, r, 1)))
5655 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5656 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5657 /* Look first for regs to share, then for unshared. But
5658 don't share regs used for inherited reloads; they are
5659 the ones we want to preserve. */
5660 && (pass
5661 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5662 regnum)
5663 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5664 regnum))))
5666 int nr = HARD_REGNO_NREGS (regnum, rld[r].mode);
5667 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5668 (on 68000) got us two FP regs. If NR is 1,
5669 we would reject both of them. */
5670 if (force_group)
5671 nr = rld[r].nregs;
5672 /* If we need only one reg, we have already won. */
5673 if (nr == 1)
5675 /* But reject a single reg if we demand a group. */
5676 if (force_group)
5677 continue;
5678 break;
5680 /* Otherwise check that as many consecutive regs as we need
5681 are available here.
5682 Also, don't use for a group registers that are
5683 needed for nongroups. */
5684 if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
5685 while (nr > 1)
5687 int regno = regnum + nr - 1;
5688 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5689 && spill_reg_order[regno] >= 0
5690 && reload_reg_free_p (regno, rld[r].opnum,
5691 rld[r].when_needed)
5692 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
5693 regno)))
5694 break;
5695 nr--;
5697 if (nr == 1)
5698 break;
5702 /* If we found something on pass 1, omit pass 2. */
5703 if (count < n_spills)
5704 break;
5707 /* We should have found a spill register by now. */
5708 if (count == n_spills)
5710 if (noerror)
5711 return 0;
5712 goto failure;
5715 if (set_reload_reg (i, r))
5716 return 1;
5718 /* The reg is not OK. */
5719 if (noerror)
5720 return 0;
5722 failure:
5723 failed_reload (insn, r);
5725 return 1;
5728 /* Initialize all the tables needed to allocate reload registers.
5729 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5730 is the array we use to restore the reg_rtx field for every reload. */
5731 static void
5732 choose_reload_regs_init (chain, save_reload_reg_rtx)
5733 struct insn_chain *chain;
5734 rtx *save_reload_reg_rtx;
5736 int i;
5738 for (i = 0; i < n_reloads; i++)
5739 rld[i].reg_rtx = save_reload_reg_rtx[i];
5741 bzero (reload_inherited, MAX_RELOADS);
5742 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5743 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5745 CLEAR_HARD_REG_SET (reload_reg_used);
5746 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5747 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5748 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5749 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5750 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5752 CLEAR_HARD_REG_SET (reg_used_in_insn);
5754 HARD_REG_SET tmp;
5755 REG_SET_TO_HARD_REG_SET (tmp, chain->live_before);
5756 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5757 REG_SET_TO_HARD_REG_SET (tmp, chain->live_after);
5758 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5759 compute_use_by_pseudos (&reg_used_in_insn, chain->live_before);
5760 compute_use_by_pseudos (&reg_used_in_insn, chain->live_after);
5762 for (i = 0; i < reload_n_operands; i++)
5764 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5765 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5766 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5767 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5768 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5769 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5772 IOR_COMPL_HARD_REG_SET (reload_reg_used, chain->used_spill_regs);
5774 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5776 for (i = 0; i < n_reloads; i++)
5777 /* If we have already decided to use a certain register,
5778 don't use it in another way. */
5779 if (rld[i].reg_rtx)
5780 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5781 rld[i].when_needed, rld[i].mode);
5784 /* Assign hard reg targets for the pseudo-registers we must reload
5785 into hard regs for this insn.
5786 Also output the instructions to copy them in and out of the hard regs.
5788 For machines with register classes, we are responsible for
5789 finding a reload reg in the proper class. */
5791 static void
5792 choose_reload_regs (chain)
5793 struct insn_chain *chain;
5795 rtx insn = chain->insn;
5796 register int i, j;
5797 int max_group_size = 1;
5798 enum reg_class group_class = NO_REGS;
5799 int inheritance;
5800 int pass;
5802 rtx save_reload_reg_rtx[MAX_RELOADS];
5804 /* In order to be certain of getting the registers we need,
5805 we must sort the reloads into order of increasing register class.
5806 Then our grabbing of reload registers will parallel the process
5807 that provided the reload registers.
5809 Also note whether any of the reloads wants a consecutive group of regs.
5810 If so, record the maximum size of the group desired and what
5811 register class contains all the groups needed by this insn. */
5813 for (j = 0; j < n_reloads; j++)
5815 reload_order[j] = j;
5816 reload_spill_index[j] = -1;
5818 if (rld[j].nregs > 1)
5820 max_group_size = MAX (rld[j].nregs, max_group_size);
5821 group_class = reg_class_superunion[(int)rld[j].class][(int)group_class];
5824 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5827 if (n_reloads > 1)
5828 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
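/* For example, a reload whose class contains a single hard register must
   be handled before one that accepts any general register; processing
   them in the other order could let the wide-class reload take the one
   register the narrow-class reload can use.  */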
5830 /* If -O, try first with inheritance, then turning it off.
5831 If not -O, don't do inheritance.
5832 Using inheritance when not optimizing leads to paradoxes
5833 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5834 because one side of the comparison might be inherited. */
5836 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5838 choose_reload_regs_init (chain, save_reload_reg_rtx);
5840 /* Process the reloads in order of preference just found.
5841 Beyond this point, subregs can be found in reload_reg_rtx.
5843 This used to look for an existing reloaded home for all
5844 of the reloads, and only then perform any new reloads.
5845 But that could lose if the reloads were done out of reg-class order
5846 because a later reload with a looser constraint might have an old
5847 home in a register needed by an earlier reload with a tighter constraint.
5849 To solve this, we make two passes over the reloads, in the order
5850 described above. In the first pass we try to inherit a reload
5851 from a previous insn. If there is a later reload that needs a
5852 class that is a proper subset of the class being processed, we must
5853 also allocate a spill register during the first pass.
5855 Then make a second pass over the reloads to allocate any reloads
5856 that haven't been given registers yet. */
5858 for (j = 0; j < n_reloads; j++)
5860 register int r = reload_order[j];
5861 rtx search_equiv = NULL_RTX;
5863 /* Ignore reloads that got marked inoperative. */
5864 if (rld[r].out == 0 && rld[r].in == 0
5865 && ! rld[r].secondary_p)
5866 continue;
5868 /* If find_reloads chose to use reload_in or reload_out as a reload
5869 register, we don't need to choose one. Otherwise, try even if it
5870 found one since we might save an insn if we find the value lying
5871 around.
5872 Try also when reload_in is a pseudo without a hard reg. */
5873 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5874 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5875 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5876 && GET_CODE (rld[r].in) != MEM
5877 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5878 continue;
5880 #if 0 /* No longer needed for correct operation.
5881 It might give better code, or might not; worth an experiment? */
5882 /* If this is an optional reload, we can't inherit from earlier insns
5883 until we are sure that any non-optional reloads have been allocated.
5884 The following code takes advantage of the fact that optional reloads
5885 are at the end of reload_order. */
5886 if (rld[r].optional != 0)
5887 for (i = 0; i < j; i++)
5888 if ((rld[reload_order[i]].out != 0
5889 || rld[reload_order[i]].in != 0
5890 || rld[reload_order[i]].secondary_p)
5891 && ! rld[reload_order[i]].optional
5892 && rld[reload_order[i]].reg_rtx == 0)
5893 allocate_reload_reg (chain, reload_order[i], 0, inheritance);
5894 #endif
5896 /* First see if this pseudo is already available as reloaded
5897 for a previous insn. We cannot try to inherit for reloads
5898 that are smaller than the maximum number of registers needed
5899 for groups unless the register we would allocate cannot be used
5900 for the groups.
5902 We could check here to see if this is a secondary reload for
5903 an object that is already in a register of the desired class.
5904 This would avoid the need for the secondary reload register.
5905 But this is complex because we can't easily determine what
5906 objects might want to be loaded via this reload. So let a
5907 register be allocated here. In `emit_reload_insns' we suppress
5908 one of the loads in the case described above. */
5910 if (inheritance)
5912 int word = 0;
5913 register int regno = -1;
5914 enum machine_mode mode = VOIDmode;
5916 if (rld[r].in == 0)
5918 else if (GET_CODE (rld[r].in) == REG)
5920 regno = REGNO (rld[r].in);
5921 mode = GET_MODE (rld[r].in);
5923 else if (GET_CODE (rld[r].in_reg) == REG)
5925 regno = REGNO (rld[r].in_reg);
5926 mode = GET_MODE (rld[r].in_reg);
5928 else if (GET_CODE (rld[r].in_reg) == SUBREG
5929 && GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
5931 word = SUBREG_WORD (rld[r].in_reg);
5932 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5933 if (regno < FIRST_PSEUDO_REGISTER)
5934 regno += word;
5935 mode = GET_MODE (rld[r].in_reg);
5937 #ifdef AUTO_INC_DEC
5938 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5939 || GET_CODE (rld[r].in_reg) == PRE_DEC
5940 || GET_CODE (rld[r].in_reg) == POST_INC
5941 || GET_CODE (rld[r].in_reg) == POST_DEC)
5942 && GET_CODE (XEXP (rld[r].in_reg, 0)) == REG)
5944 regno = REGNO (XEXP (rld[r].in_reg, 0));
5945 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5946 rld[r].out = rld[r].in;
5948 #endif
5949 #if 0
5950 /* This won't work, since REGNO can be a pseudo reg number.
5951 Also, it takes much more hair to keep track of all the things
5952 that can invalidate an inherited reload of part of a pseudoreg. */
5953 else if (GET_CODE (rld[r].in) == SUBREG
5954 && GET_CODE (SUBREG_REG (rld[r].in)) == REG)
5955 regno = REGNO (SUBREG_REG (rld[r].in)) + SUBREG_WORD (rld[r].in);
5956 #endif
5958 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5960 enum reg_class class = rld[r].class, last_class;
5961 rtx last_reg = reg_last_reload_reg[regno];
5963 i = REGNO (last_reg) + word;
5964 last_class = REGNO_REG_CLASS (i);
5965 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5966 >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
5967 && reg_reloaded_contents[i] == regno
5968 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5969 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5970 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5971 /* Even if we can't use this register as a reload
5972 register, we might use it for reload_override_in,
5973 if copying it to the desired class is cheap
5974 enough. */
5975 || ((REGISTER_MOVE_COST (last_class, class)
5976 < MEMORY_MOVE_COST (mode, class, 1))
5977 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5978 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5979 last_reg)
5980 == NO_REGS)
5981 #endif
5982 #ifdef SECONDARY_MEMORY_NEEDED
5983 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5984 mode)
5985 #endif
5988 && (rld[r].nregs == max_group_size
5989 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5991 && reload_reg_free_for_value_p (i, rld[r].opnum,
5992 rld[r].when_needed,
5993 rld[r].in,
5994 const0_rtx, r, 1))
5996 /* If a group is needed, verify that all the subsequent
5997 registers still have their values intact. */
5998 int nr
5999 = HARD_REGNO_NREGS (i, rld[r].mode);
6000 int k;
6002 for (k = 1; k < nr; k++)
6003 if (reg_reloaded_contents[i + k] != regno
6004 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6005 break;
6007 if (k == nr)
6009 int i1;
6011 last_reg = (GET_MODE (last_reg) == mode
6012 ? last_reg : gen_rtx_REG (mode, i));
6014 /* We found a register that contains the
6015 value we need. If this register is the
6016 same as an `earlyclobber' operand of the
6017 current insn, just mark it as a place to
6018 reload from since we can't use it as the
6019 reload register itself. */
6021 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6022 if (reg_overlap_mentioned_for_reload_p
6023 (reg_last_reload_reg[regno],
6024 reload_earlyclobbers[i1]))
6025 break;
6027 if (i1 != n_earlyclobbers
6028 || ! (reload_reg_free_for_value_p
6029 (i, rld[r].opnum, rld[r].when_needed,
6030 rld[r].in, rld[r].out, r, 1))
6031 /* Don't use it if we'd clobber a pseudo reg. */
6032 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6033 && rld[r].out
6034 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6035 /* Don't clobber the frame pointer. */
6036 || (i == HARD_FRAME_POINTER_REGNUM && rld[r].out)
6037 /* Don't really use the inherited spill reg
6038 if we need it wider than we've got it. */
6039 || (GET_MODE_SIZE (rld[r].mode)
6040 > GET_MODE_SIZE (mode))
6041 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6044 /* If find_reloads chose reload_out as reload
6045 register, stay with it - that leaves the
6046 inherited register for subsequent reloads. */
6047 || (rld[r].out && rld[r].reg_rtx
6048 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6050 reload_override_in[r] = last_reg;
6051 reload_inheritance_insn[r]
6052 = reg_reloaded_insn[i];
6054 else
6056 int k;
6057 /* We can use this as a reload reg. */
6058 /* Mark the register as in use for this part of
6059 the insn. */
6060 mark_reload_reg_in_use (i,
6061 rld[r].opnum,
6062 rld[r].when_needed,
6063 rld[r].mode);
6064 rld[r].reg_rtx = last_reg;
6065 reload_inherited[r] = 1;
6066 reload_inheritance_insn[r]
6067 = reg_reloaded_insn[i];
6068 reload_spill_index[r] = i;
6069 for (k = 0; k < nr; k++)
6070 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6071 i + k);
6078 /* Here's another way to see if the value is already lying around. */
6079 if (inheritance
6080 && rld[r].in != 0
6081 && ! reload_inherited[r]
6082 && rld[r].out == 0
6083 && (CONSTANT_P (rld[r].in)
6084 || GET_CODE (rld[r].in) == PLUS
6085 || GET_CODE (rld[r].in) == REG
6086 || GET_CODE (rld[r].in) == MEM)
6087 && (rld[r].nregs == max_group_size
6088 || ! reg_classes_intersect_p (rld[r].class, group_class)))
6089 search_equiv = rld[r].in;
6090 /* If this is an output reload from a simple move insn, look
6091 if an equivalence for the input is available. */
6092 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6094 rtx set = single_set (insn);
6096 if (set
6097 && rtx_equal_p (rld[r].out, SET_DEST (set))
6098 && CONSTANT_P (SET_SRC (set)))
6099 search_equiv = SET_SRC (set);
6102 if (search_equiv)
6104 register rtx equiv
6105 = find_equiv_reg (search_equiv, insn, rld[r].class,
6106 -1, NULL_PTR, 0, rld[r].mode);
6107 int regno = 0;
6109 if (equiv != 0)
6111 if (GET_CODE (equiv) == REG)
6112 regno = REGNO (equiv);
6113 else if (GET_CODE (equiv) == SUBREG)
6115 /* This must be a SUBREG of a hard register.
6116 Make a new REG since this might be used in an
6117 address and not all machines support SUBREGs
6118 there. */
6119 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
6120 equiv = gen_rtx_REG (rld[r].mode, regno);
6122 else
6123 abort ();
6126 /* If we found a spill reg, reject it unless it is free
6127 and of the desired class. */
6128 if (equiv != 0
6129 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
6130 && ! reload_reg_free_for_value_p (regno, rld[r].opnum,
6131 rld[r].when_needed,
6132 rld[r].in,
6133 rld[r].out, r, 1))
6134 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
6135 regno)))
6136 equiv = 0;
6138 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6139 equiv = 0;
6141 /* We found a register that contains the value we need.
6142 If this register is the same as an `earlyclobber' operand
6143 of the current insn, just mark it as a place to reload from
6144 since we can't use it as the reload register itself. */
6146 if (equiv != 0)
6147 for (i = 0; i < n_earlyclobbers; i++)
6148 if (reg_overlap_mentioned_for_reload_p (equiv,
6149 reload_earlyclobbers[i]))
6151 reload_override_in[r] = equiv;
6152 equiv = 0;
6153 break;
6156 /* If the equiv register we have found is explicitly clobbered
6157 in the current insn, it depends on the reload type whether we
6158 can use it, use it for reload_override_in, or not use it at all.
6159 In particular, we then can't use EQUIV for a
6160 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6162 if (equiv != 0 && regno_clobbered_p (regno, insn))
6164 switch (rld[r].when_needed)
6166 case RELOAD_FOR_OTHER_ADDRESS:
6167 case RELOAD_FOR_INPADDR_ADDRESS:
6168 case RELOAD_FOR_INPUT_ADDRESS:
6169 case RELOAD_FOR_OPADDR_ADDR:
6170 break;
6171 case RELOAD_OTHER:
6172 case RELOAD_FOR_INPUT:
6173 case RELOAD_FOR_OPERAND_ADDRESS:
6174 reload_override_in[r] = equiv;
6175 /* Fall through. */
6176 default:
6177 equiv = 0;
6178 break;
6182 /* If we found an equivalent reg, say no code need be generated
6183 to load it, and use it as our reload reg. */
6184 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
6186 int nr = HARD_REGNO_NREGS (regno, rld[r].mode);
6187 int k;
6188 rld[r].reg_rtx = equiv;
6189 reload_inherited[r] = 1;
6191 /* If reg_reloaded_valid is not set for this register,
6192 there might be a stale spill_reg_store lying around.
6193 We must clear it, since otherwise emit_reload_insns
6194 might delete the store. */
6195 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6196 spill_reg_store[regno] = NULL_RTX;
6197 /* If any of the hard registers in EQUIV are spill
6198 registers, mark them as in use for this insn. */
6199 for (k = 0; k < nr; k++)
6201 i = spill_reg_order[regno + k];
6202 if (i >= 0)
6204 mark_reload_reg_in_use (regno, rld[r].opnum,
6205 rld[r].when_needed,
6206 rld[r].mode);
6207 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6208 regno + k);
6214 /* If we found a register to use already, or if this is an optional
6215 reload, we are done. */
6216 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6217 continue;
6219 #if 0 /* No longer needed for correct operation. Might or might not
6220 give better code on the average. Want to experiment? */
6222 /* See if there is a later reload that has a class different from our
6223 class that intersects our class or that requires less register
6224 than our reload. If so, we must allocate a register to this
6225 reload now, since that reload might inherit a previous reload
6226 and take the only available register in our class. Don't do this
6227 for optional reloads since they will force all previous reloads
6228 to be allocated. Also don't do this for reloads that have been
6229 turned off. */
6231 for (i = j + 1; i < n_reloads; i++)
6233 int s = reload_order[i];
6235 if ((rld[s].in == 0 && rld[s].out == 0
6236 && ! rld[s].secondary_p)
6237 || rld[s].optional)
6238 continue;
6240 if ((rld[s].class != rld[r].class
6241 && reg_classes_intersect_p (rld[r].class,
6242 rld[s].class))
6243 || rld[s].nregs < rld[r].nregs)
6244 break;
6247 if (i == n_reloads)
6248 continue;
6250 allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
6251 #endif
6254 /* Now allocate reload registers for anything non-optional that
6255 didn't get one yet. */
6256 for (j = 0; j < n_reloads; j++)
6258 register int r = reload_order[j];
6260 /* Ignore reloads that got marked inoperative. */
6261 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6262 continue;
6264 /* Skip reloads that already have a register allocated or are
6265 optional. */
6266 if (rld[r].reg_rtx != 0 || rld[r].optional)
6267 continue;
6269 if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
6270 break;
6273 /* If that loop got all the way, we have won. */
6274 if (j == n_reloads)
6275 break;
6277 /* Loop around and try without any inheritance. */
6280 /* If we thought we could inherit a reload, because it seemed that
6281 nothing else wanted the same reload register earlier in the insn,
6282 verify that assumption, now that all reloads have been assigned.
6283 Likewise for reloads where reload_override_in has been set. */
6285 /* If doing expensive optimizations, do one preliminary pass that doesn't
6286 cancel any inheritance, but removes reloads that have been needed only
6287 for reloads that we know can be inherited. */
6288 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6290 for (j = 0; j < n_reloads; j++)
6292 register int r = reload_order[j];
6293 rtx check_reg;
6294 if (reload_inherited[r] && rld[r].reg_rtx)
6295 check_reg = rld[r].reg_rtx;
6296 else if (reload_override_in[r]
6297 && (GET_CODE (reload_override_in[r]) == REG
6298 || GET_CODE (reload_override_in[r]) == SUBREG))
6299 check_reg = reload_override_in[r];
6300 else
6301 continue;
6302 if (! reload_reg_free_for_value_p (true_regnum (check_reg),
6303 rld[r].opnum,
6304 rld[r].when_needed,
6305 rld[r].in,
6306 (reload_inherited[r]
6307 ? rld[r].out : const0_rtx),
6308 r, 1))
6310 if (pass)
6311 continue;
6312 reload_inherited[r] = 0;
6313 reload_override_in[r] = 0;
6315 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6316 reload_override_in, then we do not need its related
6317 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6318 likewise for other reload types.
6319 We handle this by removing a reload when its only replacement
6320 is mentioned in reload_in of the reload we are going to inherit.
6321 A special case are auto_inc expressions; even if the input is
6322 inherited, we still need the address for the output. We can
6323 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6324 If we succeeded in removing some reload and we are doing a preliminary
6325 pass just to remove such reloads, make another pass, since the
6326 removal of one reload might allow us to inherit another one. */
6327 else if (rld[r].in
6328 && rld[r].out != rld[r].in
6329 && remove_address_replacements (rld[r].in) && pass)
6330 pass = 2;
6334 /* Now that reload_override_in is known valid,
6335 actually override reload_in. */
6336 for (j = 0; j < n_reloads; j++)
6337 if (reload_override_in[j])
6338 rld[j].in = reload_override_in[j];
6340 /* If this reload won't be done because it has been cancelled or is
6341 optional and not inherited, clear reload_reg_rtx so other
6342 routines (such as subst_reloads) don't get confused. */
6343 for (j = 0; j < n_reloads; j++)
6344 if (rld[j].reg_rtx != 0
6345 && ((rld[j].optional && ! reload_inherited[j])
6346 || (rld[j].in == 0 && rld[j].out == 0
6347 && ! rld[j].secondary_p)))
6349 int regno = true_regnum (rld[j].reg_rtx);
6351 if (spill_reg_order[regno] >= 0)
6352 clear_reload_reg_in_use (regno, rld[j].opnum,
6353 rld[j].when_needed, rld[j].mode);
6354 rld[j].reg_rtx = 0;
6357 /* Record which pseudos and which spill regs have output reloads. */
6358 for (j = 0; j < n_reloads; j++)
6360 register int r = reload_order[j];
6362 i = reload_spill_index[r];
6364 /* I is nonneg if this reload uses a register.
6365 If rld[r].reg_rtx is 0, this is an optional reload
6366 that we opted to ignore. */
6367 if (rld[r].out_reg != 0 && GET_CODE (rld[r].out_reg) == REG
6368 && rld[r].reg_rtx != 0)
6370 register int nregno = REGNO (rld[r].out_reg);
6371 int nr = 1;
6373 if (nregno < FIRST_PSEUDO_REGISTER)
6374 nr = HARD_REGNO_NREGS (nregno, rld[r].mode);
6376 while (--nr >= 0)
6377 reg_has_output_reload[nregno + nr] = 1;
6379 if (i >= 0)
6381 nr = HARD_REGNO_NREGS (i, rld[r].mode);
6382 while (--nr >= 0)
6383 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6386 if (rld[r].when_needed != RELOAD_OTHER
6387 && rld[r].when_needed != RELOAD_FOR_OUTPUT
6388 && rld[r].when_needed != RELOAD_FOR_INSN)
6389 abort ();
6394 /* Deallocate the reload register for reload R. This is called from
6395 remove_address_replacements. */
6396 void
6397 deallocate_reload_reg (r)
6398 int r;
6400 int regno;
6402 if (! rld[r].reg_rtx)
6403 return;
6404 regno = true_regnum (rld[r].reg_rtx);
6405 rld[r].reg_rtx = 0;
6406 if (spill_reg_order[regno] >= 0)
6407 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6408 rld[r].mode);
6409 reload_spill_index[r] = -1;
6412 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6413 reloads of the same item for fear that we might not have enough reload
6414 registers. However, normally they will get the same reload register
6415 and hence actually need not be loaded twice.
6417 Here we check for the most common case of this phenomenon: when we have
6418 a number of reloads for the same object, each of which was allocated
6419 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6420 reload, and is not modified in the insn itself. If we find such,
6421 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6422 This will not increase the number of spill registers needed and will
6423 prevent redundant code. */
6425 static void
6426 merge_assigned_reloads (insn)
6427 rtx insn;
6429 int i, j;
6431 /* Scan all the reloads looking for ones that only load values and
6432 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6433 assigned and not modified by INSN. */
6435 for (i = 0; i < n_reloads; i++)
6437 int conflicting_input = 0;
6438 int max_input_address_opnum = -1;
6439 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6441 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6442 || rld[i].out != 0 || rld[i].reg_rtx == 0
6443 || reg_set_p (rld[i].reg_rtx, insn))
6444 continue;
6446 /* Look at all other reloads. Ensure that the only use of this
6447 reload_reg_rtx is in a reload that just loads the same value
6448 as we do. Note that any secondary reloads must be of the identical
6449 class since the values, modes, and result registers are the
6450 same, so we need not do anything with any secondary reloads. */
6452 for (j = 0; j < n_reloads; j++)
6454 if (i == j || rld[j].reg_rtx == 0
6455 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6456 rld[i].reg_rtx))
6457 continue;
6459 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6460 && rld[j].opnum > max_input_address_opnum)
6461 max_input_address_opnum = rld[j].opnum;
6463 /* If the reload regs aren't exactly the same (e.g., different modes)
6464 or if the values are different, we can't merge this reload.
6465 But if it is an input reload, we might still merge
6466 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6468 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6469 || rld[j].out != 0 || rld[j].in == 0
6470 || ! rtx_equal_p (rld[i].in, rld[j].in))
6472 if (rld[j].when_needed != RELOAD_FOR_INPUT
6473 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6474 || rld[i].opnum > rld[j].opnum)
6475 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6476 break;
6477 conflicting_input = 1;
6478 if (min_conflicting_input_opnum > rld[j].opnum)
6479 min_conflicting_input_opnum = rld[j].opnum;
6483 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6484 we, in fact, found any matching reloads. */
6486 if (j == n_reloads
6487 && max_input_address_opnum <= min_conflicting_input_opnum)
6489 for (j = 0; j < n_reloads; j++)
6490 if (i != j && rld[j].reg_rtx != 0
6491 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6492 && (! conflicting_input
6493 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6494 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6496 rld[i].when_needed = RELOAD_OTHER;
6497 rld[j].in = 0;
6498 reload_spill_index[j] = -1;
6499 transfer_replacements (i, j);
6502 /* If this is now RELOAD_OTHER, look for any reloads that load
6503 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6504 if they were for inputs, RELOAD_OTHER for outputs. Note that
6505 this test is equivalent to looking for reloads for this operand
6506 number. */
6508 if (rld[i].when_needed == RELOAD_OTHER)
6509 for (j = 0; j < n_reloads; j++)
6510 if (rld[j].in != 0
6511 && rld[j].when_needed != RELOAD_OTHER
6512 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6513 rld[i].in))
6514 rld[j].when_needed
6515 = ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
6516 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6517 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
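#if 0
/* A hypothetical sketch of the most common case: reloads 0 and 1 load
   the same value into the same reload register for two different
   operands and neither is an output reload.  Once the checks above
   succeed, the merge amounts to:  */
rld[0].when_needed = RELOAD_OTHER;
rld[1].in = 0;
reload_spill_index[1] = -1;
transfer_replacements (0, 1);
#endif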
6523 /* Output insns to reload values in and out of the chosen reload regs. */
6525 static void
6526 emit_reload_insns (chain)
6527 struct insn_chain *chain;
6529 rtx insn = chain->insn;
6531 register int j;
6532 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6533 rtx other_input_address_reload_insns = 0;
6534 rtx other_input_reload_insns = 0;
6535 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6536 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6537 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6538 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6539 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6540 rtx operand_reload_insns = 0;
6541 rtx other_operand_reload_insns = 0;
6542 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6543 rtx following_insn = NEXT_INSN (insn);
6544 rtx before_insn = PREV_INSN (insn);
6545 int special;
6546 /* Values to be put in spill_reg_store are put here first. */
6547 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6548 HARD_REG_SET reg_reloaded_died;
6550 CLEAR_HARD_REG_SET (reg_reloaded_died);
6552 for (j = 0; j < reload_n_operands; j++)
6553 input_reload_insns[j] = input_address_reload_insns[j]
6554 = inpaddr_address_reload_insns[j]
6555 = output_reload_insns[j] = output_address_reload_insns[j]
6556 = outaddr_address_reload_insns[j]
6557 = other_output_reload_insns[j] = 0;
6559 /* Now output the instructions to copy the data into and out of the
6560 reload registers. Do these in the order that the reloads were reported,
6561 since reloads of base and index registers precede reloads of operands
6562 and the operands may need the base and index registers reloaded. */
6564 for (j = 0; j < n_reloads; j++)
6566 register rtx old;
6567 rtx oldequiv_reg = 0;
6568 rtx this_reload_insn = 0;
6569 int expect_occurrences = 1;
6571 if (rld[j].reg_rtx
6572 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
6573 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
6575 old = (rld[j].in && GET_CODE (rld[j].in) == MEM
6576 ? rld[j].in_reg : rld[j].in);
6578 if (old != 0
6579 /* AUTO_INC reloads need to be handled even if inherited. We got an
6580 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6581 && (! reload_inherited[j] || (rld[j].out && ! rld[j].out_reg))
6582 && ! rtx_equal_p (rld[j].reg_rtx, old)
6583 && rld[j].reg_rtx != 0)
6585 register rtx reloadreg = rld[j].reg_rtx;
6586 rtx oldequiv = 0;
6587 enum machine_mode mode;
6588 rtx *where;
6590 /* Determine the mode to reload in.
6591 This is very tricky because we have three to choose from.
6592 There is the mode the insn operand wants (rld[J].inmode).
6593 There is the mode of the reload register RELOADREG.
6594 There is the intrinsic mode of the operand, which we could find
6595 by stripping some SUBREGs.
6596 It turns out that RELOADREG's mode is irrelevant:
6597 we can change that arbitrarily.
6599 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6600 then the reload reg may not support QImode moves, so use SImode.
6601 If foo is in memory due to spilling a pseudo reg, this is safe,
6602 because the QImode value is in the least significant part of a
6603 slot big enough for a SImode. If foo is some other sort of
6604 memory reference, then it is impossible to reload this case,
6605 so previous passes had better make sure this never happens.
6607 Then consider a one-word union which has SImode and one of its
6608 members is a float, being fetched as (SUBREG:SF union:SI).
6609 We must fetch that as SFmode because we could be loading into
6610 a float-only register. In this case OLD's mode is correct.
6612 Consider an immediate integer: it has VOIDmode. Here we need
6613 to get a mode from something else.
6615 In some cases, there is a fourth mode, the operand's
6616 containing mode. If the insn specifies a containing mode for
6617 this operand, it overrides all others.
6619 I am not sure whether the algorithm here is always right,
6620 but it does the right things in those cases. */
6622 mode = GET_MODE (old);
6623 if (mode == VOIDmode)
6624 mode = rld[j].inmode;
6626 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6627 /* If we need a secondary register for this operation, see if
6628 the value is already in a register in that class. Don't
6629 do this if the secondary register will be used as a scratch
6630 register. */
6632 if (rld[j].secondary_in_reload >= 0
6633 && rld[j].secondary_in_icode == CODE_FOR_nothing
6634 && optimize)
6635 oldequiv
6636 = find_equiv_reg (old, insn,
6637 rld[rld[j].secondary_in_reload].class,
6638 -1, NULL_PTR, 0, mode);
6639 #endif
6641 /* If reloading from memory, see if there is a register
6642 that already holds the same value. If so, reload from there.
6643 We can pass 0 as the reload_reg_p argument because
6644 any other reload has either already been emitted,
6645 in which case find_equiv_reg will see the reload-insn,
6646 or has yet to be emitted, in which case it doesn't matter
6647 because we will use this equiv reg right away. */
6649 if (oldequiv == 0 && optimize
6650 && (GET_CODE (old) == MEM
6651 || (GET_CODE (old) == REG
6652 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6653 && reg_renumber[REGNO (old)] < 0)))
6654 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6655 -1, NULL_PTR, 0, mode);
6657 if (oldequiv)
6659 int regno = true_regnum (oldequiv);
6661 /* Don't use OLDEQUIV if any other reload changes it at an
6662 earlier stage of this insn or at this stage. */
6663 if (! reload_reg_free_for_value_p (regno, rld[j].opnum,
6664 rld[j].when_needed,
6665 rld[j].in, const0_rtx, j,
6667 oldequiv = 0;
6669 /* If it is no cheaper to copy from OLDEQUIV into the
6670 reload register than it would be to move from memory,
6671 don't use it. Likewise, if we need a secondary register
6672 or memory. */
6674 if (oldequiv != 0
6675 && ((REGNO_REG_CLASS (regno) != rld[j].class
6676 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6677 rld[j].class)
6678 >= MEMORY_MOVE_COST (mode, rld[j].class, 1)))
6679 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6680 || (SECONDARY_INPUT_RELOAD_CLASS (rld[j].class,
6681 mode, oldequiv)
6682 != NO_REGS)
6683 #endif
6684 #ifdef SECONDARY_MEMORY_NEEDED
6685 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6686 rld[j].class,
6687 mode)
6688 #endif
6690 oldequiv = 0;
6693 /* delete_output_reload is only invoked properly if old contains
6694 the original pseudo register. Since this is replaced with a
6695 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6696 find the pseudo in RELOAD_IN_REG. */
6697 if (oldequiv == 0
6698 && reload_override_in[j]
6699 && GET_CODE (rld[j].in_reg) == REG)
6701 oldequiv = old;
6702 old = rld[j].in_reg;
6704 if (oldequiv == 0)
6705 oldequiv = old;
6706 else if (GET_CODE (oldequiv) == REG)
6707 oldequiv_reg = oldequiv;
6708 else if (GET_CODE (oldequiv) == SUBREG)
6709 oldequiv_reg = SUBREG_REG (oldequiv);
6711 /* If we are reloading from a register that was recently stored in
6712 with an output-reload, see if we can prove there was
6713 actually no need to store the old value in it. */
6715 if (optimize && GET_CODE (oldequiv) == REG
6716 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6717 && spill_reg_store[REGNO (oldequiv)]
6718 && GET_CODE (old) == REG
6719 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6720 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6721 rld[j].out_reg)))
6722 delete_output_reload (insn, j, REGNO (oldequiv));
6724 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6725 then load RELOADREG from OLDEQUIV. Note that we cannot use
6726 gen_lowpart_common since it can do the wrong thing when
6727 RELOADREG has a multi-word mode. Note that RELOADREG
6728 must always be a REG here. */
6730 if (GET_MODE (reloadreg) != mode)
6731 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6732 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6733 oldequiv = SUBREG_REG (oldequiv);
6734 if (GET_MODE (oldequiv) != VOIDmode
6735 && mode != GET_MODE (oldequiv))
6736 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
6738 /* Switch to the right place to emit the reload insns. */
6739 switch (rld[j].when_needed)
6741 case RELOAD_OTHER:
6742 where = &other_input_reload_insns;
6743 break;
6744 case RELOAD_FOR_INPUT:
6745 where = &input_reload_insns[rld[j].opnum];
6746 break;
6747 case RELOAD_FOR_INPUT_ADDRESS:
6748 where = &input_address_reload_insns[rld[j].opnum];
6749 break;
6750 case RELOAD_FOR_INPADDR_ADDRESS:
6751 where = &inpaddr_address_reload_insns[rld[j].opnum];
6752 break;
6753 case RELOAD_FOR_OUTPUT_ADDRESS:
6754 where = &output_address_reload_insns[rld[j].opnum];
6755 break;
6756 case RELOAD_FOR_OUTADDR_ADDRESS:
6757 where = &outaddr_address_reload_insns[rld[j].opnum];
6758 break;
6759 case RELOAD_FOR_OPERAND_ADDRESS:
6760 where = &operand_reload_insns;
6761 break;
6762 case RELOAD_FOR_OPADDR_ADDR:
6763 where = &other_operand_reload_insns;
6764 break;
6765 case RELOAD_FOR_OTHER_ADDRESS:
6766 where = &other_input_address_reload_insns;
6767 break;
6768 default:
6769 abort ();
6772 push_to_sequence (*where);
6773 special = 0;
6775 /* Auto-increment addresses must be reloaded in a special way. */
6776 if (rld[j].out && ! rld[j].out_reg)
6778 /* We are not going to bother supporting the case where an
6779 incremented register can't be copied directly from
6780 OLDEQUIV since this seems highly unlikely. */
6781 if (rld[j].secondary_in_reload >= 0)
6782 abort ();
6784 if (reload_inherited[j])
6785 oldequiv = reloadreg;
6787 old = XEXP (rld[j].in_reg, 0);
6789 if (optimize && GET_CODE (oldequiv) == REG
6790 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6791 && spill_reg_store[REGNO (oldequiv)]
6792 && GET_CODE (old) == REG
6793 && (dead_or_set_p (insn,
6794 spill_reg_stored_to[REGNO (oldequiv)])
6795 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6796 old)))
6797 delete_output_reload (insn, j, REGNO (oldequiv));
6799 /* Prevent normal processing of this reload. */
6800 special = 1;
6801 /* Output a special code sequence for this case. */
6802 new_spill_reg_store[REGNO (reloadreg)]
6803 = inc_for_reload (reloadreg, oldequiv, rld[j].out,
6804 rld[j].inc);
6807 /* If we are reloading a pseudo-register that was set by the previous
6808 insn, see if we can get rid of that pseudo-register entirely
6809 by redirecting the previous insn into our reload register. */
6811 else if (optimize && GET_CODE (old) == REG
6812 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6813 && dead_or_set_p (insn, old)
6814 /* This is unsafe if some other reload
6815 uses the same reg first. */
6816 && reload_reg_free_for_value_p (REGNO (reloadreg),
6817 rld[j].opnum,
6818 rld[j].when_needed,
6819 old, rld[j].out,
6820 j, 0))
6822 rtx temp = PREV_INSN (insn);
6823 while (temp && GET_CODE (temp) == NOTE)
6824 temp = PREV_INSN (temp);
6825 if (temp
6826 && GET_CODE (temp) == INSN
6827 && GET_CODE (PATTERN (temp)) == SET
6828 && SET_DEST (PATTERN (temp)) == old
6829 /* Make sure we can access insn_operand_constraint. */
6830 && asm_noperands (PATTERN (temp)) < 0
6831 /* This is unsafe if prev insn rejects our reload reg. */
6832 && constraint_accepts_reg_p (insn_data[recog_memoized (temp)].operand[0].constraint,
6833 reloadreg)
6834 /* This is unsafe if the operand occurs more than once in the
6835 current insn, since some occurrences might not be reloaded. */
6836 && count_occurrences (PATTERN (insn), old) == 1
6837 /* Don't risk splitting a matching pair of operands. */
6838 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6840 /* Store into the reload register instead of the pseudo. */
6841 SET_DEST (PATTERN (temp)) = reloadreg;
6843 /* If the previous insn is an output reload, its source is
6844 a reload register, and its spill_reg_store entry will
6845 contain the previous destination; that entry is now
6846 invalid. */
6847 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6848 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6850 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6851 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6854 /* If these are the only uses of the pseudo reg,
6855 pretend for GDB it lives in the reload reg we used. */
6856 if (REG_N_DEATHS (REGNO (old)) == 1
6857 && REG_N_SETS (REGNO (old)) == 1)
6859 reg_renumber[REGNO (old)] = REGNO (rld[j].reg_rtx);
6860 alter_reg (REGNO (old), -1);
6862 special = 1;
6866 /* We can't do that, so output an insn to load RELOADREG. */
6868 if (! special)
6870 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6871 rtx second_reload_reg = 0;
6872 enum insn_code icode;
6874 /* If we have a secondary reload, pick up the secondary register
6875 and icode, if any. If OLDEQUIV and OLD are different or
6876 if this is an in-out reload, recompute whether or not we
6877 still need a secondary register and what the icode should
6878 be. If we still need a secondary register and the class or
6879 icode is different, go back to reloading from OLD if using
6880 OLDEQUIV means that we got the wrong type of register. We
6881 cannot have different class or icode due to an in-out reload
6882 because we don't make such reloads when both the input and
6883 output need secondary reload registers. */
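/* A secondary input reload arises on machines where, say, a value can
   only reach a register of the needed class by going through a register
   of some other class; that intermediate (or scratch) register is what
   SECOND_RELOAD_REG below will hold.  (The example is illustrative;
   the exact cases are defined by SECONDARY_INPUT_RELOAD_CLASS.)  */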
6885 if (rld[j].secondary_in_reload >= 0)
6887 int secondary_reload = rld[j].secondary_in_reload;
6888 rtx real_oldequiv = oldequiv;
6889 rtx real_old = old;
6890 rtx tmp;
6892 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6893 and similarly for OLD.
6894 See comments in get_secondary_reload in reload.c. */
6895 /* If it is a pseudo that cannot be replaced with its
6896 equivalent MEM, we must fall back to reload_in, which
6897 will have all the necessary substitutions registered.
6898 Likewise for a pseudo that can't be replaced with its
6899 equivalent constant.
6901 Take extra care for subregs of such pseudos. Note that
6902 we cannot use reg_equiv_mem in this case because it is
6903 not in the right mode. */
6905 tmp = oldequiv;
6906 if (GET_CODE (tmp) == SUBREG)
6907 tmp = SUBREG_REG (tmp);
6908 if (GET_CODE (tmp) == REG
6909 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6910 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6911 || reg_equiv_constant[REGNO (tmp)] != 0))
6913 if (! reg_equiv_mem[REGNO (tmp)]
6914 || num_not_at_initial_offset
6915 || GET_CODE (oldequiv) == SUBREG)
6916 real_oldequiv = rld[j].in;
6917 else
6918 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6921 tmp = old;
6922 if (GET_CODE (tmp) == SUBREG)
6923 tmp = SUBREG_REG (tmp);
6924 if (GET_CODE (tmp) == REG
6925 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6926 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6927 || reg_equiv_constant[REGNO (tmp)] != 0))
6929 if (! reg_equiv_mem[REGNO (tmp)]
6930 || num_not_at_initial_offset
6931 || GET_CODE (old) == SUBREG)
6932 real_old = rld[j].in;
6933 else
6934 real_old = reg_equiv_mem[REGNO (tmp)];
6937 second_reload_reg = rld[secondary_reload].reg_rtx;
6938 icode = rld[j].secondary_in_icode;
6940 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6941 || (rld[j].in != 0 && rld[j].out != 0))
6943 enum reg_class new_class
6944 = SECONDARY_INPUT_RELOAD_CLASS (rld[j].class,
6945 mode, real_oldequiv);
6947 if (new_class == NO_REGS)
6948 second_reload_reg = 0;
6949 else
6951 enum insn_code new_icode;
6952 enum machine_mode new_mode;
6954 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6955 REGNO (second_reload_reg)))
6956 oldequiv = old, real_oldequiv = real_old;
6957 else
6959 new_icode = reload_in_optab[(int) mode];
6960 if (new_icode != CODE_FOR_nothing
6961 && ((insn_data[(int) new_icode].operand[0].predicate
6962 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6963 (reloadreg, mode)))
6964 || (insn_data[(int) new_icode].operand[1].predicate
6965 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6966 (real_oldequiv, mode)))))
6967 new_icode = CODE_FOR_nothing;
6969 if (new_icode == CODE_FOR_nothing)
6970 new_mode = mode;
6971 else
6972 new_mode = insn_data[(int) new_icode].operand[2].mode;
6974 if (GET_MODE (second_reload_reg) != new_mode)
6976 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6977 new_mode))
6978 oldequiv = old, real_oldequiv = real_old;
6979 else
6980 second_reload_reg
6981 = gen_rtx_REG (new_mode,
6982 REGNO (second_reload_reg));
6988 /* If we still need a secondary reload register, check
6989 to see if it is being used as a scratch or intermediate
6990 register and generate code appropriately. If we need
6991 a scratch register, use REAL_OLDEQUIV since the form of
6992 the insn may depend on the actual address if it is
6993 a MEM. */
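/* In outline: when ICODE is defined, the secondary register is a
   scratch, and the single insn generated by GEN_FCN (icode)
   (reloadreg, real_oldequiv, second_reload_reg) does the whole job;
   otherwise it is an intermediate, so we first load it (possibly via a
   tertiary scratch) and then let the ordinary code at the end of this
   block copy it into RELOADREG by setting OLDEQUIV to it.  */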
6995 if (second_reload_reg)
6997 if (icode != CODE_FOR_nothing)
6999 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7000 second_reload_reg));
7001 special = 1;
7003 else
7005 /* See if we need a scratch register to load the
7006 intermediate register (a tertiary reload). */
7007 enum insn_code tertiary_icode
7008 = rld[secondary_reload].secondary_in_icode;
7010 if (tertiary_icode != CODE_FOR_nothing)
7012 rtx third_reload_reg
7013 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
7015 emit_insn ((GEN_FCN (tertiary_icode)
7016 (second_reload_reg, real_oldequiv,
7017 third_reload_reg)));
7019 else
7020 gen_reload (second_reload_reg, real_oldequiv,
7021 rld[j].opnum,
7022 rld[j].when_needed);
7024 oldequiv = second_reload_reg;
7028 #endif
7030 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7032 rtx real_oldequiv = oldequiv;
7034 if ((GET_CODE (oldequiv) == REG
7035 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7036 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7037 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7038 || (GET_CODE (oldequiv) == SUBREG
7039 && GET_CODE (SUBREG_REG (oldequiv)) == REG
7040 && (REGNO (SUBREG_REG (oldequiv))
7041 >= FIRST_PSEUDO_REGISTER)
7042 && ((reg_equiv_memory_loc
7043 [REGNO (SUBREG_REG (oldequiv))] != 0)
7044 || (reg_equiv_constant
7045 [REGNO (SUBREG_REG (oldequiv))] != 0))))
7046 real_oldequiv = rld[j].in;
7047 gen_reload (reloadreg, real_oldequiv, rld[j].opnum,
7048 rld[j].when_needed);
7053 this_reload_insn = get_last_insn ();
7054 /* End this sequence. */
7055 *where = get_insns ();
7056 end_sequence ();
7058 /* Update reload_override_in so that delete_address_reloads_1
7059 can see the actual register usage. */
7060 if (oldequiv_reg)
7061 reload_override_in[j] = oldequiv;
7064 /* When inheriting a wider reload, we have a MEM in rld[j].in,
7065 e.g. inheriting a SImode output reload for
7066 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7067 if (optimize && reload_inherited[j] && rld[j].in
7068 && GET_CODE (rld[j].in) == MEM
7069 && GET_CODE (rld[j].in_reg) == MEM
7070 && reload_spill_index[j] >= 0
7071 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7073 expect_occurrences
7074 = count_occurrences (PATTERN (insn), rld[j].in) == 1 ? 0 : -1;
7075 rld[j].in
7076 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7079 /* If we are reloading a register that was recently stored in with an
7080 output-reload, see if we can prove there was
7081 actually no need to store the old value in it. */
7083 if (optimize
7084 && (reload_inherited[j] || reload_override_in[j])
7085 && rld[j].reg_rtx
7086 && GET_CODE (rld[j].reg_rtx) == REG
7087 && spill_reg_store[REGNO (rld[j].reg_rtx)] != 0
7088 #if 0
7089 /* There doesn't seem to be any reason to restrict this to pseudos
7090 and doing so loses in the case where we are copying from a
7091 register of the wrong class. */
7092 && (REGNO (spill_reg_stored_to[REGNO (rld[j].reg_rtx)])
7093 >= FIRST_PSEUDO_REGISTER)
7094 #endif
7095 /* The insn might already have some references to stack slots
7096 replaced by MEMs, while reload_out_reg still names the
7097 original pseudo. */
7098 && (dead_or_set_p (insn,
7099 spill_reg_stored_to[REGNO (rld[j].reg_rtx)])
7100 || rtx_equal_p (spill_reg_stored_to[REGNO (rld[j].reg_rtx)],
7101 rld[j].out_reg)))
7102 delete_output_reload (insn, j, REGNO (rld[j].reg_rtx));
7104 /* Input-reloading is done. Now do output-reloading,
7105 storing the value from the reload-register after the main insn
7106 if rld[j].out is nonzero.
7108 ??? At some point we need to support handling output reloads of
7109 JUMP_INSNs or insns that set cc0. */
7111 /* If this is an output reload that stores something that is
7112 not loaded in this same reload, see if we can eliminate a previous
7113 store. */
7115 rtx pseudo = rld[j].out_reg;
7117 if (pseudo
7118 && GET_CODE (pseudo) == REG
7119 && ! rtx_equal_p (rld[j].in_reg, pseudo)
7120 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7121 && reg_last_reload_reg[REGNO (pseudo)])
7123 int pseudo_no = REGNO (pseudo);
7124 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7126 /* We don't need to test full validity of last_regno for
7127 inherit here; we only want to know if the store actually
7128 matches the pseudo. */
7129 if (reg_reloaded_contents[last_regno] == pseudo_no
7130 && spill_reg_store[last_regno]
7131 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7132 delete_output_reload (insn, j, last_regno);
7136 old = rld[j].out_reg;
7137 if (old != 0
7138 && rld[j].reg_rtx != old
7139 && rld[j].reg_rtx != 0)
7141 register rtx reloadreg = rld[j].reg_rtx;
7142 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7143 register rtx second_reloadreg = 0;
7144 #endif
7145 rtx note, p;
7146 enum machine_mode mode;
7147 int special = 0;
7149 /* An output operand that dies right away does need a reload,
7150 but need not be copied from it. Show the new location in the
7151 REG_UNUSED note. */
7152 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7153 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7155 XEXP (note, 0) = rld[j].reg_rtx;
7156 continue;
7158 /* Likewise for a SUBREG of an operand that dies. */
7159 else if (GET_CODE (old) == SUBREG
7160 && GET_CODE (SUBREG_REG (old)) == REG
7161 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7162 SUBREG_REG (old))))
7164 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7165 rld[j].reg_rtx);
7166 continue;
7168 else if (GET_CODE (old) == SCRATCH)
7169 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7170 but we don't want to make an output reload. */
7171 continue;
7173 #if 0
7174 /* Strip off of OLD any size-increasing SUBREGs such as
7175 (SUBREG:SI foo:QI 0). */
7177 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7178 && (GET_MODE_SIZE (GET_MODE (old))
7179 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7180 old = SUBREG_REG (old);
7181 #endif
7183 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
7184 if (GET_CODE (insn) == JUMP_INSN)
7185 abort ();
7187 if (rld[j].when_needed == RELOAD_OTHER)
7188 start_sequence ();
7189 else
7190 push_to_sequence (output_reload_insns[rld[j].opnum]);
7192 old = rld[j].out;
7194 /* Determine the mode to reload in.
7195 See comments above (for input reloading). */
7197 mode = GET_MODE (old);
7198 if (mode == VOIDmode)
7200 /* VOIDmode should never happen for an output. */
7201 if (asm_noperands (PATTERN (insn)) < 0)
7202 /* It's the compiler's fault. */
7203 fatal_insn ("VOIDmode on an output", insn);
7204 error_for_asm (insn, "output operand is constant in `asm'");
7205 /* Prevent crash--use something we know is valid. */
7206 mode = word_mode;
7207 old = gen_rtx_REG (mode, REGNO (reloadreg));
7210 if (GET_MODE (reloadreg) != mode)
7211 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7213 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7215 /* If we need two reload regs, set RELOADREG to the intermediate
7216 one, since it will be stored into OLD. We might need a secondary
7217 register only for an input reload, so check again here. */
7219 if (rld[j].secondary_out_reload >= 0)
7221 rtx real_old = old;
7223 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7224 && reg_equiv_mem[REGNO (old)] != 0)
7225 real_old = reg_equiv_mem[REGNO (old)];
7227 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rld[j].class,
7228 mode, real_old)
7229 != NO_REGS))
7231 second_reloadreg = reloadreg;
7232 reloadreg = rld[rld[j].secondary_out_reload].reg_rtx;
7234 /* See if RELOADREG is to be used as a scratch register
7235 or as an intermediate register. */
7236 if (rld[j].secondary_out_icode != CODE_FOR_nothing)
7238 emit_insn ((GEN_FCN (rld[j].secondary_out_icode)
7239 (real_old, second_reloadreg, reloadreg)));
7240 special = 1;
7242 else
7244 /* See if we need both a scratch and intermediate reload
7245 register. */
7247 int secondary_reload = rld[j].secondary_out_reload;
7248 enum insn_code tertiary_icode
7249 = rld[secondary_reload].secondary_out_icode;
7251 if (GET_MODE (reloadreg) != mode)
7252 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7254 if (tertiary_icode != CODE_FOR_nothing)
7256 rtx third_reloadreg
7257 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
7258 rtx tem;
7260 /* Copy the primary reload reg to the secondary reload reg
7261 (note that these have been swapped above), then copy the
7262 secondary reload reg to OLD using our insn. */
7264 /* If REAL_OLD is a paradoxical SUBREG, remove it
7265 and try to put the opposite SUBREG on
7266 RELOADREG. */
7267 if (GET_CODE (real_old) == SUBREG
7268 && (GET_MODE_SIZE (GET_MODE (real_old))
7269 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7270 && 0 != (tem = gen_lowpart_common
7271 (GET_MODE (SUBREG_REG (real_old)),
7272 reloadreg)))
7273 real_old = SUBREG_REG (real_old), reloadreg = tem;
7275 gen_reload (reloadreg, second_reloadreg,
7276 rld[j].opnum, rld[j].when_needed);
7277 emit_insn ((GEN_FCN (tertiary_icode)
7278 (real_old, reloadreg, third_reloadreg)));
7279 special = 1;
7282 else
7283 /* Copy between the reload regs here and then to
7284 OUT later. */
7286 gen_reload (reloadreg, second_reloadreg,
7287 rld[j].opnum, rld[j].when_needed);
7291 #endif
7293 /* Output the last reload insn. */
7294 if (! special)
7296 rtx set;
7298 /* Don't output the last reload if OLD is not the dest of
7299 INSN and is in the src and is clobbered by INSN. */
7300 if (! flag_expensive_optimizations
7301 || GET_CODE (old) != REG
7302 || !(set = single_set (insn))
7303 || rtx_equal_p (old, SET_DEST (set))
7304 || !reg_mentioned_p (old, SET_SRC (set))
7305 || !regno_clobbered_p (REGNO (old), insn))
7306 gen_reload (old, reloadreg, rld[j].opnum,
7307 rld[j].when_needed);
7310 /* Look at all insns we emitted, just to be safe. */
7311 for (p = get_insns (); p; p = NEXT_INSN (p))
7312 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7314 rtx pat = PATTERN (p);
7316 /* If this output reload doesn't come from a spill reg,
7317 clear any memory of reloaded copies of the pseudo reg.
7318 If this output reload comes from a spill reg,
7319 reg_has_output_reload will make this do nothing. */
7320 note_stores (pat, forget_old_reloads_1, NULL);
7322 if (reg_mentioned_p (rld[j].reg_rtx, pat))
7324 rtx set = single_set (insn);
7325 if (reload_spill_index[j] < 0
7326 && set
7327 && SET_SRC (set) == rld[j].reg_rtx)
7329 int src = REGNO (SET_SRC (set));
7331 reload_spill_index[j] = src;
7332 SET_HARD_REG_BIT (reg_is_output_reload, src);
7333 if (find_regno_note (insn, REG_DEAD, src))
7334 SET_HARD_REG_BIT (reg_reloaded_died, src);
7336 if (REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7338 int s = rld[j].secondary_out_reload;
7339 set = single_set (p);
7340 /* If this reload copies only to the secondary reload
7341 register, the secondary reload does the actual
7342 store. */
7343 if (s >= 0 && set == NULL_RTX)
7344 ; /* We can't tell what function the secondary reload
7345 has and where the actual store to the pseudo is
7346 made; leave new_spill_reg_store alone. */
7347 else if (s >= 0
7348 && SET_SRC (set) == rld[j].reg_rtx
7349 && SET_DEST (set) == rld[s].reg_rtx)
7351 /* Usually the next instruction will be the
7352 secondary reload insn; if we can confirm
7353 that it is, setting new_spill_reg_store to
7354 that insn will allow an extra optimization. */
7355 rtx s_reg = rld[s].reg_rtx;
7356 rtx next = NEXT_INSN (p);
7357 rld[s].out = rld[j].out;
7358 rld[s].out_reg = rld[j].out_reg;
7359 set = single_set (next);
7360 if (set && SET_SRC (set) == s_reg
7361 && ! new_spill_reg_store[REGNO (s_reg)])
7363 SET_HARD_REG_BIT (reg_is_output_reload,
7364 REGNO (s_reg));
7365 new_spill_reg_store[REGNO (s_reg)] = next;
7368 else
7369 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = p;
7374 if (rld[j].when_needed == RELOAD_OTHER)
7376 emit_insns (other_output_reload_insns[rld[j].opnum]);
7377 other_output_reload_insns[rld[j].opnum] = get_insns ();
7379 else
7380 output_reload_insns[rld[j].opnum] = get_insns ();
7382 end_sequence ();
7386 /* Now write all the insns we made for reloads in the order expected by
7387 the allocation functions. Prior to the insn being reloaded, we write
7388 the following reloads:
7390 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7392 RELOAD_OTHER reloads.
7394 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7395 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7396 RELOAD_FOR_INPUT reload for the operand.
7398 RELOAD_FOR_OPADDR_ADDRS reloads.
7400 RELOAD_FOR_OPERAND_ADDRESS reloads.
7402 After the insn being reloaded, we write the following:
7404 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7405 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7406 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7407 reloads for the operand. The RELOAD_OTHER output reloads are
7408 output in descending order by reload number. */
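/* Schematically, for an insn with one reloaded operand the stream ends
   up as
       <other_input_address reloads>
       <other_input reloads>
       <inpaddr_address, input_address, input reloads for operand 0>
       <other_operand and operand reloads>
       INSN
       <outaddr_address, output_address, output, other_output reloads>
   which is exactly what the emit loops below produce.  */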
7410 emit_insns_before (other_input_address_reload_insns, insn);
7411 emit_insns_before (other_input_reload_insns, insn);
7413 for (j = 0; j < reload_n_operands; j++)
7415 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7416 emit_insns_before (input_address_reload_insns[j], insn);
7417 emit_insns_before (input_reload_insns[j], insn);
7420 emit_insns_before (other_operand_reload_insns, insn);
7421 emit_insns_before (operand_reload_insns, insn);
7423 for (j = 0; j < reload_n_operands; j++)
7425 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7426 emit_insns_before (output_address_reload_insns[j], following_insn);
7427 emit_insns_before (output_reload_insns[j], following_insn);
7428 emit_insns_before (other_output_reload_insns[j], following_insn);
7431 /* Keep basic block info up to date. */
7432 if (n_basic_blocks)
7434 if (BLOCK_HEAD (chain->block) == insn)
7435 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn);
7436 if (BLOCK_END (chain->block) == insn)
7437 BLOCK_END (chain->block) = PREV_INSN (following_insn);
7440 /* For all the spill regs newly reloaded in this instruction,
7441 record what they were reloaded from, so subsequent instructions
7442 can inherit the reloads.
7444 Update spill_reg_store for the reloads of this insn.
7445 Copy the elements that were updated in the loop above. */
7447 for (j = 0; j < n_reloads; j++)
7449 register int r = reload_order[j];
7450 register int i = reload_spill_index[r];
7452 /* If this is a non-inherited input reload from a pseudo, we must
7453 clear any memory of a previous store to the same pseudo. Only do
7454 something if there will not be an output reload for the pseudo
7455 being reloaded. */
7456 if (rld[r].in_reg != 0
7457 && ! (reload_inherited[r] || reload_override_in[r]))
7459 rtx reg = rld[r].in_reg;
7461 if (GET_CODE (reg) == SUBREG)
7462 reg = SUBREG_REG (reg);
7464 if (GET_CODE (reg) == REG
7465 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7466 && ! reg_has_output_reload[REGNO (reg)])
7468 int nregno = REGNO (reg);
7470 if (reg_last_reload_reg[nregno])
7472 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7474 if (reg_reloaded_contents[last_regno] == nregno)
7475 spill_reg_store[last_regno] = 0;
7480 /* I is nonneg if this reload used a register.
7481 If rld[r].reg_rtx is 0, this is an optional reload
7482 that we opted to ignore. */
7484 if (i >= 0 && rld[r].reg_rtx != 0)
7486 int nr
7487 = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
7488 int k;
7489 int part_reaches_end = 0;
7490 int all_reaches_end = 1;
7492 /* For a multi register reload, we need to check if all or part
7493 of the value lives to the end. */
7494 for (k = 0; k < nr; k++)
7496 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7497 rld[r].when_needed))
7498 part_reaches_end = 1;
7499 else
7500 all_reaches_end = 0;
7503 /* Ignore reloads that don't reach the end of the insn in
7504 their entirety. */
7505 if (all_reaches_end)
7507 /* First, clear out memory of what used to be in this spill reg.
7508 If consecutive registers are used, clear them all. */
7510 for (k = 0; k < nr; k++)
7511 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7513 /* Maybe the spill reg contains a copy of reload_out. */
7514 if (rld[r].out != 0
7515 && (GET_CODE (rld[r].out) == REG
7516 #ifdef AUTO_INC_DEC
7517 || ! rld[r].out_reg
7518 #endif
7519 || GET_CODE (rld[r].out_reg) == REG))
7521 rtx out = (GET_CODE (rld[r].out) == REG
7522 ? rld[r].out
7523 : rld[r].out_reg
7524 ? rld[r].out_reg
7525 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7526 register int nregno = REGNO (out);
7527 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7528 : HARD_REGNO_NREGS (nregno,
7529 GET_MODE (rld[r].reg_rtx)));
7531 spill_reg_store[i] = new_spill_reg_store[i];
7532 spill_reg_stored_to[i] = out;
7533 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7535 /* If NREGNO is a hard register, it may occupy more than
7536 one register. If it does, say what is in the
7537 rest of the registers assuming that both registers
7538 agree on how many words the object takes. If not,
7539 invalidate the subsequent registers. */
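/* For example, if the reloaded value lives in hard regs 4-5 (say a
   DImode pair) and the reload register pair, say regs 8-9, spans the
   same number of registers, we record that reg 5's contents are also
   available in reg 9; if the counts disagree, the entry for reg 5 is
   cleared instead.  */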
7541 if (nregno < FIRST_PSEUDO_REGISTER)
7542 for (k = 1; k < nnr; k++)
7543 reg_last_reload_reg[nregno + k]
7544 = (nr == nnr
7545 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7546 REGNO (rld[r].reg_rtx) + k)
7547 : 0);
7549 /* Now do the inverse operation. */
7550 for (k = 0; k < nr; k++)
7552 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7553 reg_reloaded_contents[i + k]
7554 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7555 ? nregno
7556 : nregno + k);
7557 reg_reloaded_insn[i + k] = insn;
7558 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7562 /* Maybe the spill reg contains a copy of reload_in. Only do
7563 something if there will not be an output reload for
7564 the register being reloaded. */
7565 else if (rld[r].out_reg == 0
7566 && rld[r].in != 0
7567 && ((GET_CODE (rld[r].in) == REG
7568 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7569 && ! reg_has_output_reload[REGNO (rld[r].in)])
7570 || (GET_CODE (rld[r].in_reg) == REG
7571 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7572 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7574 register int nregno;
7575 int nnr;
7577 if (GET_CODE (rld[r].in) == REG
7578 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7579 nregno = REGNO (rld[r].in);
7580 else if (GET_CODE (rld[r].in_reg) == REG)
7581 nregno = REGNO (rld[r].in_reg);
7582 else
7583 nregno = REGNO (XEXP (rld[r].in_reg, 0));
7585 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7586 : HARD_REGNO_NREGS (nregno,
7587 GET_MODE (rld[r].reg_rtx)));
7589 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7591 if (nregno < FIRST_PSEUDO_REGISTER)
7592 for (k = 1; k < nnr; k++)
7593 reg_last_reload_reg[nregno + k]
7594 = (nr == nnr
7595 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7596 REGNO (rld[r].reg_rtx) + k)
7597 : 0);
7599 /* Unless we inherited this reload, show we haven't
7600 recently done a store.
7601 Previous stores of inherited auto_inc expressions
7602 also have to be discarded. */
7603 if (! reload_inherited[r]
7604 || (rld[r].out && ! rld[r].out_reg))
7605 spill_reg_store[i] = 0;
7607 for (k = 0; k < nr; k++)
7609 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7610 reg_reloaded_contents[i + k]
7611 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7612 ? nregno
7613 : nregno + k);
7614 reg_reloaded_insn[i + k] = insn;
7615 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7620 /* However, if part of the reload reaches the end, then we must
7621 invalidate the old info for the part that survives to the end. */
7622 else if (part_reaches_end)
7624 for (k = 0; k < nr; k++)
7625 if (reload_reg_reaches_end_p (i + k,
7626 rld[r].opnum,
7627 rld[r].when_needed))
7628 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7632 /* The following if-statement was #if 0'd in 1.34 (or before...).
7633 It's reenabled in 1.35 because supposedly nothing else
7634 deals with this problem. */
7636 /* If a register gets output-reloaded from a non-spill register,
7637 that invalidates any previous reloaded copy of it.
7638 But forget_old_reloads_1 won't get to see it, because
7639 it thinks only about the original insn. So invalidate it here. */
7640 if (i < 0 && rld[r].out != 0
7641 && (GET_CODE (rld[r].out) == REG
7642 || (GET_CODE (rld[r].out) == MEM
7643 && GET_CODE (rld[r].out_reg) == REG)))
7645 rtx out = (GET_CODE (rld[r].out) == REG
7646 ? rld[r].out : rld[r].out_reg);
7647 register int nregno = REGNO (out);
7648 if (nregno >= FIRST_PSEUDO_REGISTER)
7650 rtx src_reg, store_insn = NULL_RTX;
7652 reg_last_reload_reg[nregno] = 0;
7654 /* If we can find a hard register that is stored, record
7655 the storing insn so that we may delete this insn with
7656 delete_output_reload. */
7657 src_reg = rld[r].reg_rtx;
7659 /* If this is an optional reload, try to find the source reg
7660 from an input reload. */
7661 if (! src_reg)
7663 rtx set = single_set (insn);
7664 if (set && SET_DEST (set) == rld[r].out)
7666 int k;
7668 src_reg = SET_SRC (set);
7669 store_insn = insn;
7670 for (k = 0; k < n_reloads; k++)
7672 if (rld[k].in == src_reg)
7674 src_reg = rld[k].reg_rtx;
7675 break;
7680 else
7681 store_insn = new_spill_reg_store[REGNO (src_reg)];
7682 if (src_reg && GET_CODE (src_reg) == REG
7683 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7685 int src_regno = REGNO (src_reg);
7686 int nr = HARD_REGNO_NREGS (src_regno, rld[r].mode);
7687 /* The place to find a death note varies with
7688 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7689 necessarily checked exactly in the code that moves
7690 notes, so just check both locations. */
7691 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7692 if (! note)
7693 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7694 while (nr-- > 0)
7696 spill_reg_store[src_regno + nr] = store_insn;
7697 spill_reg_stored_to[src_regno + nr] = out;
7698 reg_reloaded_contents[src_regno + nr] = nregno;
7699 reg_reloaded_insn[src_regno + nr] = store_insn;
7700 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7701 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7702 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7703 if (note)
7704 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7705 else
7706 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7708 reg_last_reload_reg[nregno] = src_reg;
7711 else
7713 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
7715 while (num_regs-- > 0)
7716 reg_last_reload_reg[nregno + num_regs] = 0;
7720 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7723 /* Emit code to perform a reload from IN (which may be a reload register) to
7724 OUT (which may also be a reload register). IN or OUT is from operand
7725 OPNUM with reload type TYPE.
7727 Returns first insn emitted. */
7730 gen_reload (out, in, opnum, type)
7731 rtx out;
7732 rtx in;
7733 int opnum;
7734 enum reload_type type;
7736 rtx last = get_last_insn ();
7737 rtx tem;
7739 /* If IN is a paradoxical SUBREG, remove it and try to put the
7740 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7741 if (GET_CODE (in) == SUBREG
7742 && (GET_MODE_SIZE (GET_MODE (in))
7743 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7744 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7745 in = SUBREG_REG (in), out = tem;
7746 else if (GET_CODE (out) == SUBREG
7747 && (GET_MODE_SIZE (GET_MODE (out))
7748 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7749 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7750 out = SUBREG_REG (out), in = tem;
7752 /* How to do this reload can get quite tricky. Normally, we are being
7753 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7754 register that didn't get a hard register. In that case we can just
7755 call emit_move_insn.
7757 We can also be asked to reload a PLUS that adds a register or a MEM to
7758 another register, constant or MEM. This can occur during frame pointer
7759 elimination and while reloading addresses. This case is handled by
7760 trying to emit a single insn to perform the add. If it is not valid,
7761 we use a two insn sequence.
7763 Finally, we could be called to handle an 'o' constraint by putting
7764 an address into a register. In that case, we first try to do this
7765 with a named pattern of "reload_load_address". If no such pattern
7766 exists, we just emit a SET insn and hope for the best (it will normally
7767 be valid on machines that use 'o').
7769 This entire process is made complex by the fact that reload will
7770 never process the insns we generate here, so we must ensure that
7771 they will fit their constraints, and also by the fact that parts of
7772 IN might be being reloaded separately and replaced with spill registers.
7773 Because of this, we are, in some sense, just guessing the right approach
7774 here. The one listed above seems to work.
7776 ??? At some point, this whole thing needs to be rethought. */
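/* As an illustration, reloading (plus:SI (reg:SI 14 fp) (const_int 64))
   into hard reg 3 first tries the single insn
       (set (reg:SI 3) (plus:SI (reg:SI 14) (const_int 64)))
   and, only if that is not recognized or fails its constraints, falls
   back to moving one operand into reg 3 and adding the other, e.g.
       (set (reg:SI 3) (const_int 64))
       (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 14)))
   which is the conservative two-insn sequence handled below.  */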
7778 if (GET_CODE (in) == PLUS
7779 && (GET_CODE (XEXP (in, 0)) == REG
7780 || GET_CODE (XEXP (in, 0)) == SUBREG
7781 || GET_CODE (XEXP (in, 0)) == MEM)
7782 && (GET_CODE (XEXP (in, 1)) == REG
7783 || GET_CODE (XEXP (in, 1)) == SUBREG
7784 || CONSTANT_P (XEXP (in, 1))
7785 || GET_CODE (XEXP (in, 1)) == MEM))
7787 /* We need to compute the sum of a register or a MEM and another
7788 register, constant, or MEM, and put it into the reload
7789 register. The best possible way of doing this is if the machine
7790 has a three-operand ADD insn that accepts the required operands.
7792 The simplest approach is to try to generate such an insn and see if it
7793 is recognized and matches its constraints. If so, it can be used.
7795 It might be better not to actually emit the insn unless it is valid,
7796 but we need to pass the insn as an operand to `recog' and
7797 `extract_insn' and it is simpler to emit and then delete the insn if
7798 not valid than to dummy things up. */
7800 rtx op0, op1, tem, insn;
7801 int code;
7803 op0 = find_replacement (&XEXP (in, 0));
7804 op1 = find_replacement (&XEXP (in, 1));
7806 /* Since constraint checking is strict, commutativity won't be
7807 checked, so we need to do that here to avoid spurious failure
7808 if the add instruction is two-address and the second operand
7809 of the add is the same as the reload reg, which is frequently
7810 the case. If the insn would be A = B + A, rearrange it so
7811 it will be A = A + B as constrain_operands expects. */
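/* For instance, with OUT == (reg 3) and IN == (plus (reg 7) (reg 3)),
   the operands are swapped so that we try
       (set (reg 3) (plus (reg 3) (reg 7)))
   instead, which a two-address add pattern can match.  */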
7813 if (GET_CODE (XEXP (in, 1)) == REG
7814 && REGNO (out) == REGNO (XEXP (in, 1)))
7815 tem = op0, op0 = op1, op1 = tem;
7817 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7818 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7820 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7821 code = recog_memoized (insn);
7823 if (code >= 0)
7825 extract_insn (insn);
7826 /* We want constrain_operands to treat this insn strictly in
7827 its validity determination, i.e., the way it would after reload
7828 has completed. */
7829 if (constrain_operands (1))
7830 return insn;
7833 delete_insns_since (last);
7835 /* If that failed, we must use a conservative two-insn sequence.
7837 Use a move to copy one operand into the reload register. Prefer
7838 to reload a constant, MEM or pseudo since the move patterns can
7839 handle an arbitrary operand. If OP1 is not a constant, MEM or
7840 pseudo and OP1 is not a valid operand for an add instruction, then
7841 reload OP1.
7843 After reloading one of the operands into the reload register, add
7844 the reload register to the output register.
7846 If there is another way to do this for a specific machine, a
7847 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7848 we emit below. */
7850 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7852 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7853 || (GET_CODE (op1) == REG
7854 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7855 || (code != CODE_FOR_nothing
7856 && ! ((*insn_data[code].operand[2].predicate)
7857 (op1, insn_data[code].operand[2].mode))))
7858 tem = op0, op0 = op1, op1 = tem;
7860 gen_reload (out, op0, opnum, type);
7862 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7863 This fixes a problem on the 32K where the stack pointer cannot
7864 be used as an operand of an add insn. */
7866 if (rtx_equal_p (op0, op1))
7867 op1 = out;
7869 insn = emit_insn (gen_add2_insn (out, op1));
7871 /* If that failed, copy the address register to the reload register.
7872 Then add the constant to the reload register. */
7874 code = recog_memoized (insn);
7876 if (code >= 0)
7878 extract_insn (insn);
7879 /* We want constrain_operands to treat this insn strictly in
7880 its validity determination, i.e., the way it would after reload
7881 has completed. */
7882 if (constrain_operands (1))
7884 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7885 REG_NOTES (insn)
7886 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7887 return insn;
7891 delete_insns_since (last);
7893 gen_reload (out, op1, opnum, type);
7894 insn = emit_insn (gen_add2_insn (out, op0));
7895 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7898 #ifdef SECONDARY_MEMORY_NEEDED
7899 /* If we need a memory location to do the move, do it that way. */
7900 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7901 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7902 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7903 REGNO_REG_CLASS (REGNO (out)),
7904 GET_MODE (out)))
7906 /* Get the memory to use and rewrite both registers to its mode. */
7907 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7909 if (GET_MODE (loc) != GET_MODE (out))
7910 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7912 if (GET_MODE (loc) != GET_MODE (in))
7913 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7915 gen_reload (loc, in, opnum, type);
7916 gen_reload (out, loc, opnum, type);
7918 #endif
7920 /* If IN is a simple operand, use gen_move_insn. */
7921 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7922 emit_insn (gen_move_insn (out, in));
7924 #ifdef HAVE_reload_load_address
7925 else if (HAVE_reload_load_address)
7926 emit_insn (gen_reload_load_address (out, in));
7927 #endif
7929 /* Otherwise, just write (set OUT IN) and hope for the best. */
7930 else
7931 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7933 /* Return the first insn emitted.
7934 We cannot just return get_last_insn, because there may have
7935 been multiple instructions emitted. Also note that gen_move_insn may
7936 emit more than one insn itself, so we cannot assume that there is one
7937 insn emitted per emit_insn_before call. */
7939 return last ? NEXT_INSN (last) : get_insns ();
7942 /* Delete a previously made output-reload
7943 whose result we now believe is not needed.
7944 First we double-check.
7946 INSN is the insn now being processed.
7947 LAST_RELOAD_REG is the hard register number for which we want to delete
7948 the last output reload.
7949 J is the reload-number that originally used REG. The caller has made
7950 certain that reload J doesn't use REG any longer for input. */
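/* In outline: we count how often the pseudo still appears in INSN versus
   how many of those appearances are covered by inheritance, verify that
   no label, jump or real reference between the store and INSN still
   needs the stored value, and only then delete the old output reload
   (and, when the pseudo is otherwise unused, its other stores and its
   stack slot as well).  */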
7952 static void
7953 delete_output_reload (insn, j, last_reload_reg)
7954 rtx insn;
7955 int j;
7956 int last_reload_reg;
7958 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7959 rtx reg = spill_reg_stored_to[last_reload_reg];
7960 int k;
7961 int n_occurrences;
7962 int n_inherited = 0;
7963 register rtx i1;
7964 rtx substed;
7966 /* Get the raw pseudo-register referred to. */
7968 while (GET_CODE (reg) == SUBREG)
7969 reg = SUBREG_REG (reg);
7970 substed = reg_equiv_memory_loc[REGNO (reg)];
7972 /* This is unsafe if the operand occurs more often in the current
7973 insn than it is inherited. */
7974 for (k = n_reloads - 1; k >= 0; k--)
7976 rtx reg2 = rld[k].in;
7977 if (! reg2)
7978 continue;
7979 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7980 reg2 = rld[k].in_reg;
7981 #ifdef AUTO_INC_DEC
7982 if (rld[k].out && ! rld[k].out_reg)
7983 reg2 = XEXP (rld[k].in_reg, 0);
7984 #endif
7985 while (GET_CODE (reg2) == SUBREG)
7986 reg2 = SUBREG_REG (reg2);
7987 if (rtx_equal_p (reg2, reg))
7989 if (reload_inherited[k] || reload_override_in[k] || k == j)
7991 n_inherited++;
7992 reg2 = rld[k].out_reg;
7993 if (! reg2)
7994 continue;
7995 while (GET_CODE (reg2) == SUBREG)
7996 reg2 = XEXP (reg2, 0);
7997 if (rtx_equal_p (reg2, reg))
7998 n_inherited++;
8000 else
8001 return;
8004 n_occurrences = count_occurrences (PATTERN (insn), reg);
8005 if (substed)
8006 n_occurrences += count_occurrences (PATTERN (insn), substed);
8007 if (n_occurrences > n_inherited)
8008 return;
8010 /* If the pseudo-reg we are reloading is no longer referenced
8011 anywhere between the store into it and here,
8012 and no jumps or labels intervene, then the value can get
8013 here through the reload reg alone.
8014 Otherwise, give up--return. */
8015 for (i1 = NEXT_INSN (output_reload_insn);
8016 i1 != insn; i1 = NEXT_INSN (i1))
8018 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
8019 return;
8020 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
8021 && reg_mentioned_p (reg, PATTERN (i1)))
8023 /* If this is a USE in front of INSN, we only have to check that
8024 there are no more references than accounted for by inheritance. */
8025 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
8027 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8028 i1 = NEXT_INSN (i1);
8030 if (n_occurrences <= n_inherited && i1 == insn)
8031 break;
8032 return;
8036 /* The caller has already checked that REG dies or is set in INSN.
8037 It has also checked that we are optimizing, and thus some inaccuracies
8038 in the debugging information are acceptable.
8039 So we could just delete output_reload_insn.
8040 But in some cases we can improve the debugging information without
8041 sacrificing optimization - maybe even improving the code:
8042 See if the pseudo reg has been completely replaced
8043 with reload regs. If so, delete the store insn
8044 and forget we had a stack slot for the pseudo. */
8045 if (rld[j].out != rld[j].in
8046 && REG_N_DEATHS (REGNO (reg)) == 1
8047 && REG_N_SETS (REGNO (reg)) == 1
8048 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
8049 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8051 rtx i2;
8053 /* We know that it was used only between here
8054 and the beginning of the current basic block.
8055 (We also know that the last use before INSN was
8056 the output reload we are thinking of deleting, but never mind that.)
8057 Search that range; see if any ref remains. */
8058 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8060 rtx set = single_set (i2);
8062 /* Uses which just store in the pseudo don't count,
8063 since if they are the only uses, they are dead. */
8064 if (set != 0 && SET_DEST (set) == reg)
8065 continue;
8066 if (GET_CODE (i2) == CODE_LABEL
8067 || GET_CODE (i2) == JUMP_INSN)
8068 break;
8069 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
8070 && reg_mentioned_p (reg, PATTERN (i2)))
8072 /* Some other ref remains; just delete the output reload we
8073 know to be dead. */
8074 delete_address_reloads (output_reload_insn, insn);
8075 PUT_CODE (output_reload_insn, NOTE);
8076 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8077 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
8078 return;
8082 /* Delete the now-dead stores into this pseudo. */
8083 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8085 rtx set = single_set (i2);
8087 if (set != 0 && SET_DEST (set) == reg)
8089 delete_address_reloads (i2, insn);
8090 /* This might be a basic block head,
8091 thus don't use delete_insn. */
8092 PUT_CODE (i2, NOTE);
8093 NOTE_SOURCE_FILE (i2) = 0;
8094 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
8096 if (GET_CODE (i2) == CODE_LABEL
8097 || GET_CODE (i2) == JUMP_INSN)
8098 break;
8101 /* For the debugging info,
8102 say the pseudo lives in this reload reg. */
8103 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
8104 alter_reg (REGNO (reg), -1);
8106 delete_address_reloads (output_reload_insn, insn);
8107 PUT_CODE (output_reload_insn, NOTE);
8108 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8109 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
8113 /* We are going to delete DEAD_INSN. Recursively delete loads of
8114 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8115 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8116 static void
8117 delete_address_reloads (dead_insn, current_insn)
8118 rtx dead_insn, current_insn;
8120 rtx set = single_set (dead_insn);
8121 rtx set2, dst, prev, next;
8122 if (set)
8124 rtx dst = SET_DEST (set);
8125 if (GET_CODE (dst) == MEM)
8126 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8128 /* If we deleted the store from a reloaded post_{in,de}c expression,
8129 we can delete the matching adds. */
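/* For instance, a reloaded post_inc may have been emitted as
       (set RELOADREG (plus RELOADREG (const_int 4)))    <- PREV
       DEAD_INSN (the store being deleted)
       (set RELOADREG (plus RELOADREG (const_int -4)))   <- NEXT
   in which case the two adds cancel once the store is gone, and the
   checks below let us delete them as well.  */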
8130 prev = PREV_INSN (dead_insn);
8131 next = NEXT_INSN (dead_insn);
8132 if (! prev || ! next)
8133 return;
8134 set = single_set (next);
8135 set2 = single_set (prev);
8136 if (! set || ! set2
8137 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8138 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8139 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8140 return;
8141 dst = SET_DEST (set);
8142 if (! rtx_equal_p (dst, SET_DEST (set2))
8143 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8144 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8145 || (INTVAL (XEXP (SET_SRC (set), 1))
8146 != - INTVAL (XEXP (SET_SRC (set2), 1))))
8147 return;
8148 delete_insn (prev);
8149 delete_insn (next);
8152 /* Subfunction of delete_address_reloads: process registers found in X. */
8153 static void
8154 delete_address_reloads_1 (dead_insn, x, current_insn)
8155 rtx dead_insn, x, current_insn;
8157 rtx prev, set, dst, i2;
8158 int i, j;
8159 enum rtx_code code = GET_CODE (x);
8161 if (code != REG)
8163 const char *fmt = GET_RTX_FORMAT (code);
8164 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8166 if (fmt[i] == 'e')
8167 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8168 else if (fmt[i] == 'E')
8170 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8171 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8172 current_insn);
8175 return;
8178 if (spill_reg_order[REGNO (x)] < 0)
8179 return;
8181 /* Scan backwards for the insn that sets x. This might be quite far back
8182 due to inheritance. */
8183 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8185 code = GET_CODE (prev);
8186 if (code == CODE_LABEL || code == JUMP_INSN)
8187 return;
8188 if (GET_RTX_CLASS (code) != 'i')
8189 continue;
8190 if (reg_set_p (x, PATTERN (prev)))
8191 break;
8192 if (reg_referenced_p (x, PATTERN (prev)))
8193 return;
8195 if (! prev || INSN_UID (prev) < reload_first_uid)
8196 return;
8197 /* Check that PREV only sets the reload register. */
8198 set = single_set (prev);
8199 if (! set)
8200 return;
8201 dst = SET_DEST (set);
8202 if (GET_CODE (dst) != REG
8203 || ! rtx_equal_p (dst, x))
8204 return;
8205 if (! reg_set_p (dst, PATTERN (dead_insn)))
8207 /* Check if DST was used in a later insn -
8208 it might have been inherited. */
8209 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8211 if (GET_CODE (i2) == CODE_LABEL)
8212 break;
8213 if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
8214 continue;
8215 if (reg_referenced_p (dst, PATTERN (i2)))
8217 /* If there is a reference to the register in the current insn,
8218 it might be loaded in a non-inherited reload. If no other
8219 reload uses it, that means the register is set before it is
8220 referenced. */
8221 if (i2 == current_insn)
8223 for (j = n_reloads - 1; j >= 0; j--)
8224 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8225 || reload_override_in[j] == dst)
8226 return;
8227 for (j = n_reloads - 1; j >= 0; j--)
8228 if (rld[j].in && rld[j].reg_rtx == dst)
8229 break;
8230 if (j >= 0)
8231 break;
8233 return;
8235 if (GET_CODE (i2) == JUMP_INSN)
8236 break;
8237 /* If DST is still live at CURRENT_INSN, check if it is used for
8238 any reload. Note that even if CURRENT_INSN sets DST, we still
8239 have to check the reloads. */
8240 if (i2 == current_insn)
8242 for (j = n_reloads - 1; j >= 0; j--)
8243 if ((rld[j].reg_rtx == dst && reload_inherited[j])
8244 || reload_override_in[j] == dst)
8245 return;
8246 /* ??? We can't finish the loop here, because dst might be
8247 allocated to a pseudo in this block if no reload in this
8248 block needs any of the classes containing DST - see
8249 spill_hard_reg. There is no easy way to tell this, so we
8250 have to scan till the end of the basic block. */
8252 if (reg_set_p (dst, PATTERN (i2)))
8253 break;
8256 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8257 reg_reloaded_contents[REGNO (dst)] = -1;
8258 /* Can't use delete_insn here because PREV might be a basic block head. */
8259 PUT_CODE (prev, NOTE);
8260 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
8261 NOTE_SOURCE_FILE (prev) = 0;
8264 /* Output reload-insns to reload VALUE into RELOADREG.
8265 VALUE is an autoincrement or autodecrement RTX whose operand
8266 is a register or memory location;
8267 so reloading involves incrementing that location.
8268 IN is either identical to VALUE, or some cheaper place to reload from.
8270 INC_AMOUNT is the number to increment or decrement by (always positive).
8271 This cannot be deduced from VALUE.
8273 Return the instruction that stores into RELOADREG. */
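/* For example, with VALUE == (post_inc:SI (reg:SI 14)) and INC_AMOUNT 4,
   the usual result is a copy of (reg 14) into RELOADREG followed by
   (set (reg:SI 14) (plus:SI (reg:SI 14) (const_int 4))) when the
   location can be incremented directly; a pre_dec instead adds -4 to
   the location and then copies it into RELOADREG.  If the direct
   increment is not recognized, the increment is done in RELOADREG as
   described in the code below.  */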
8275 static rtx
8276 inc_for_reload (reloadreg, in, value, inc_amount)
8277 rtx reloadreg;
8278 rtx in, value;
8279 int inc_amount;
8281 /* REG or MEM to be copied and incremented. */
8282 rtx incloc = XEXP (value, 0);
8283 /* Nonzero if increment after copying. */
8284 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
8285 rtx last;
8286 rtx inc;
8287 rtx add_insn;
8288 int code;
8289 rtx store;
8290 rtx real_in = in == value ? XEXP (in, 0) : in;
8292 /* No hard register is equivalent to this register after
8293 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
8294 we could inc/dec that register as well (maybe even using it for
8295 the source), but I'm not sure it's worth worrying about. */
8296 if (GET_CODE (incloc) == REG)
8297 reg_last_reload_reg[REGNO (incloc)] = 0;
8299 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8300 inc_amount = - inc_amount;
8302 inc = GEN_INT (inc_amount);
8304 /* If this is post-increment, first copy the location to the reload reg. */
8305 if (post && real_in != reloadreg)
8306 emit_insn (gen_move_insn (reloadreg, real_in));
8308 if (in == value)
8310 /* See if we can directly increment INCLOC. Use a method similar to
8311 that in gen_reload. */
8313 last = get_last_insn ();
8314 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8315 gen_rtx_PLUS (GET_MODE (incloc),
8316 incloc, inc)));
8318 code = recog_memoized (add_insn);
8319 if (code >= 0)
8321 extract_insn (add_insn);
8322 if (constrain_operands (1))
8324 /* If this is a pre-increment and we have incremented the value
8325 where it lives, copy the incremented value to RELOADREG to
8326 be used as an address. */
8328 if (! post)
8329 emit_insn (gen_move_insn (reloadreg, incloc));
8331 return add_insn;
8334 delete_insns_since (last);
8337 /* If we couldn't do the increment directly, we must increment in RELOADREG.
8338 The way we do this depends on whether this is pre- or post-increment.
8339 For pre-increment, copy INCLOC to the reload register, increment it
8340 there, then save back. */
8342 if (! post)
8344 if (in != reloadreg)
8345 emit_insn (gen_move_insn (reloadreg, real_in));
8346 emit_insn (gen_add2_insn (reloadreg, inc));
8347 store = emit_insn (gen_move_insn (incloc, reloadreg));
8349 else
8351 /* Postincrement.
8352 Because this might be a jump insn or a compare, and because RELOADREG
8353 may not be available after the insn in an input reload, we must do
8354 the incrementation before the insn we are reloading for.
8356 We have already copied IN to RELOADREG. Increment the copy in
8357 RELOADREG, save that back, then decrement RELOADREG so it has
8358 the original value. */
8360 emit_insn (gen_add2_insn (reloadreg, inc));
8361 store = emit_insn (gen_move_insn (incloc, reloadreg));
8362 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
8365 return store;
8368 /* Return 1 if we are certain that the constraint-string STRING allows
8369 the hard register REG. Return 0 if we can't be sure of this. */
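/* For example, on a typical target "r" (or "r,g") is accepted for any
   general register, while "r,m" is not, because the second alternative
   allows only memory and we require every alternative to allow REG.  */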
8371 static int
8372 constraint_accepts_reg_p (string, reg)
8373 const char *string;
8374 rtx reg;
8376 int value = 0;
8377 int regno = true_regnum (reg);
8378 int c;
8380 /* Initialize for first alternative. */
8381 value = 0;
8382 /* Check that each alternative contains `g' or `r'. */
8383 while (1)
8384 switch (c = *string++)
8386 case 0:
8387 /* If an alternative lacks `g' or `r', we lose. */
8388 return value;
8389 case ',':
8390 /* If an alternative lacks `g' or `r', we lose. */
8391 if (value == 0)
8392 return 0;
8393 /* Initialize for next alternative. */
8394 value = 0;
8395 break;
8396 case 'g':
8397 case 'r':
8398 /* Any general reg wins for this alternative. */
8399 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8400 value = 1;
8401 break;
8402 default:
8403 /* Any reg in specified class wins for this alternative. */
8405 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8407 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8408 value = 1;
8413 /* Return the number of places FIND appears within X, but don't count
8414 an occurrence if some SET_DEST is FIND. */
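/* E.g. for X == (set (reg 65) (plus (reg 65) (reg 65))) and FIND being
   that same shared (reg 65) rtx, the result is 2: both uses in the
   source count, but the SET_DEST occurrence is deliberately skipped.  */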
8417 count_occurrences (x, find)
8418 register rtx x, find;
8420 register int i, j;
8421 register enum rtx_code code;
8422 register const char *format_ptr;
8423 int count;
8425 if (x == find)
8426 return 1;
8427 if (x == 0)
8428 return 0;
8430 code = GET_CODE (x);
8432 switch (code)
8434 case REG:
8435 case QUEUED:
8436 case CONST_INT:
8437 case CONST_DOUBLE:
8438 case SYMBOL_REF:
8439 case CODE_LABEL:
8440 case PC:
8441 case CC0:
8442 return 0;
8444 case MEM:
8445 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
8446 return 1;
8447 break;
8448 case SET:
8449 if (SET_DEST (x) == find)
8450 return count_occurrences (SET_SRC (x), find);
8451 break;
8453 default:
8454 break;
8457 format_ptr = GET_RTX_FORMAT (code);
8458 count = 0;
8460 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8462 switch (*format_ptr++)
8464 case 'e':
8465 count += count_occurrences (XEXP (x, i), find);
8466 break;
8468 case 'E':
8469 if (XVEC (x, i) != NULL)
8471 for (j = 0; j < XVECLEN (x, i); j++)
8472 count += count_occurrences (XVECEXP (x, i, j), find);
8474 break;
8477 return count;
8480 /* This array holds values which are equivalent to a hard register
8481 during reload_cse_regs. Each array element is an EXPR_LIST of
8482 values. Each time a hard register is set, we set the corresponding
8483 array element to the value. Each time a hard register is copied
8484 into memory, we add the memory location to the corresponding array
8485 element. We don't store values or memory addresses with side
8486 effects in this array.
8488 If the value is a CONST_INT, then the mode of the containing
8489 EXPR_LIST is the mode in which that CONST_INT was referenced.
8491 We sometimes clobber a specific entry in a list. In that case, we
8492 just set XEXP (list-entry, 0) to 0. */
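/* For instance, after
       (set (reg:SI 3) (const_int 10))
       (set (mem:SI (reg:SI 6)) (reg:SI 3))
   reg_values[3] would list both (const_int 10) and that MEM, so a later
   use of either value can be replaced by a copy from hard reg 3.
   (Illustrative register numbers only.)  */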
8494 static rtx *reg_values;
8496 /* This is a preallocated REG rtx which we use as a temporary in
8497 reload_cse_invalidate_regno, so that we don't need to allocate a
8498 new one each time through a loop in that function. */
8500 static rtx invalidate_regno_rtx;
8502 /* Invalidate any entries in reg_values which depend on REGNO,
8503 including those for REGNO itself. This is called if REGNO is
8504 changing. If CLOBBER is true, then always forget anything we
8505 currently know about REGNO. MODE is the mode of the assignment to
8506 REGNO, which is used to determine how many hard registers are being
8507 changed. If MODE is VOIDmode, then only REGNO is being changed;
8508 this is used when invalidating call clobbered registers across a
8509 call. */
8511 static void
8512 reload_cse_invalidate_regno (regno, mode, clobber)
8513 int regno;
8514 enum machine_mode mode;
8515 int clobber;
8517 int endregno;
8518 register int i;
8520 /* Our callers don't always go through true_regnum; we may see a
8521 pseudo-register here from a CLOBBER or the like. We probably
8522 won't ever see a pseudo-register that has a real register number,
8523 but we check anyhow for safety. */
8524 if (regno >= FIRST_PSEUDO_REGISTER)
8525 regno = reg_renumber[regno];
8526 if (regno < 0)
8527 return;
8529 if (mode == VOIDmode)
8530 endregno = regno + 1;
8531 else
8532 endregno = regno + HARD_REGNO_NREGS (regno, mode);
8534 if (clobber)
8535 for (i = regno; i < endregno; i++)
8536 reg_values[i] = 0;
8538 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8540 rtx x;
8542 for (x = reg_values[i]; x; x = XEXP (x, 1))
8544 if (XEXP (x, 0) != 0
8545 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
8547 /* If this is the only entry on the list, clear
8548 reg_values[i]. Otherwise, just clear this entry on
8549 the list. */
8550 if (XEXP (x, 1) == 0 && x == reg_values[i])
8552 reg_values[i] = 0;
8553 break;
8555 XEXP (x, 0) = 0;
8560 /* We must look at earlier registers, in case REGNO is part of a
8561 multi word value but is not the first register. If an earlier
8562 register has a value in a mode which overlaps REGNO, then we must
8563 invalidate that earlier register. Note that we do not need to
8564 check REGNO or later registers (we must not check REGNO itself,
8565 because we would incorrectly conclude that there was a conflict). */
8567 for (i = 0; i < regno; i++)
8569 rtx x;
8571 for (x = reg_values[i]; x; x = XEXP (x, 1))
8573 if (XEXP (x, 0) != 0)
8575 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
8576 REGNO (invalidate_regno_rtx) = i;
8577 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8578 NULL_PTR))
8580 reload_cse_invalidate_regno (i, VOIDmode, 1);
8581 break;
8588 /* The memory at address MEM_BASE is being changed.
8589 Return whether this change will invalidate VAL. */
8591 static int
8592 reload_cse_mem_conflict_p (mem_base, val)
8593 rtx mem_base;
8594 rtx val;
8596 enum rtx_code code;
8597 const char *fmt;
8598 int i;
8600 code = GET_CODE (val);
8601 switch (code)
8603 /* Get rid of a few simple cases quickly. */
8604 case REG:
8605 case PC:
8606 case CC0:
8607 case SCRATCH:
8608 case CONST:
8609 case CONST_INT:
8610 case CONST_DOUBLE:
8611 case SYMBOL_REF:
8612 case LABEL_REF:
8613 return 0;
8615 case MEM:
8616 if (GET_MODE (mem_base) == BLKmode
8617 || GET_MODE (val) == BLKmode)
8618 return 1;
8619 if (anti_dependence (val, mem_base))
8620 return 1;
8621 /* The address may contain nested MEMs. */
8622 break;
8624 default:
8625 break;
8628 fmt = GET_RTX_FORMAT (code);
8630 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8632 if (fmt[i] == 'e')
8634 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
8635 return 1;
8637 else if (fmt[i] == 'E')
8639 int j;
8641 for (j = 0; j < XVECLEN (val, i); j++)
8642 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
8643 return 1;
8647 return 0;
8650 /* Invalidate any entries in reg_values which are changed because of a
8651 store to MEM_RTX. If this is called because of a non-const call
8652 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8654 static void
8655 reload_cse_invalidate_mem (mem_rtx)
8656 rtx mem_rtx;
8658 register int i;
8660 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8662 rtx x;
8664 for (x = reg_values[i]; x; x = XEXP (x, 1))
8666 if (XEXP (x, 0) != 0
8667 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
8669 /* If this is the only entry on the list, clear
8670 reg_values[i]. Otherwise, just clear this entry on
8671 the list. */
8672 if (XEXP (x, 1) == 0 && x == reg_values[i])
8674 reg_values[i] = 0;
8675 break;
8677 XEXP (x, 0) = 0;
8683 /* Invalidate DEST, which is being assigned to or clobbered. The
8684 second parameter exists so that this function can be passed to
8685 note_stores; it is ignored. */
8687 static void
8688 reload_cse_invalidate_rtx (dest, ignore, data)
8689 rtx dest;
8690 rtx ignore ATTRIBUTE_UNUSED;
8691 void *data ATTRIBUTE_UNUSED;
8693 while (GET_CODE (dest) == STRICT_LOW_PART
8694 || GET_CODE (dest) == SIGN_EXTRACT
8695 || GET_CODE (dest) == ZERO_EXTRACT
8696 || GET_CODE (dest) == SUBREG)
8697 dest = XEXP (dest, 0);
8699 if (GET_CODE (dest) == REG)
8700 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8701 else if (GET_CODE (dest) == MEM)
8702 reload_cse_invalidate_mem (dest);
8705 /* Do a very simple CSE pass over the hard registers.
8707 This function detects no-op moves where we happened to assign two
8708 different pseudo-registers to the same hard register, and then
8709 copied one to the other. Reload will generate a useless
8710 instruction copying a register to itself.
8712 This function also detects cases where we load a value from memory
8713 into two different registers, and (if memory is more expensive than
8714 registers) changes it to simply copy the first register into the
8715 second register.
8717 Another optimization is performed that scans the operands of each
8718 instruction to see whether the value is already available in a
8719 hard register. It then replaces the operand with the hard register
8720 if possible, much like an optional reload would. */
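/* A sketch of the kind of sequence this pass cleans up (a hypothetical
   SImode example, not from a real target):

       (set (reg:SI 3) (mem:SI (symbol_ref "x")))
       ...
       (set (reg:SI 4) (mem:SI (symbol_ref "x")))

   If reg 3 is still known to hold the value of "x" at the second insn,
   and a register copy is cheaper than the load, the second insn becomes
   (set (reg:SI 4) (reg:SI 3)); a move such as (set (reg:SI 3) (reg:SI 3))
   is simply deleted.  */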
8722 static void
8723 reload_cse_regs_1 (first)
8724 rtx first;
8726 char *firstobj;
8727 rtx callmem;
8728 register int i;
8729 rtx insn;
8731 init_alias_analysis ();
8733 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8734 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
8736 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8737 free them when we are done. */
8738 push_obstacks (&reload_obstack, &reload_obstack);
8739 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8741 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8742 memory for a non-const call instruction. */
8743 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
8745 /* This is used in reload_cse_invalidate_regno to avoid consing a
8746 new REG in a loop in that function. */
8747 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
8749 for (insn = first; insn; insn = NEXT_INSN (insn))
8751 rtx body;
8753 if (GET_CODE (insn) == CODE_LABEL)
8755 /* Forget all the register values at a code label. We don't
8756 try to do anything clever around jumps. */
8757 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8758 reg_values[i] = 0;
8760 continue;
8763 #ifdef NON_SAVING_SETJMP
8764 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8765 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8767 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8768 reg_values[i] = 0;
8770 continue;
8772 #endif
8774 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8775 continue;
8777 /* If this is a call instruction, forget anything stored in a
8778 call clobbered register, or, if this is not a const call, in
8779 memory. */
8780 if (GET_CODE (insn) == CALL_INSN)
8782 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8783 if (call_used_regs[i])
8784 reload_cse_invalidate_regno (i, VOIDmode, 1);
8786 if (! CONST_CALL_P (insn))
8787 reload_cse_invalidate_mem (callmem);
8791 /* Forget all the register values at a volatile asm. */
8792 if (GET_CODE (insn) == INSN
8793 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
8794 && MEM_VOLATILE_P (PATTERN (insn)))
8795 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8796 reg_values[i] = 0;
8798 body = PATTERN (insn);
8799 if (GET_CODE (body) == SET)
8801 int count = 0;
8802 if (reload_cse_noop_set_p (body, insn))
8804 /* If this sets the return value of the function, we must keep
8805 a USE around, in case this is in a different basic block
8806 than the final USE. Otherwise, we could lose important
8807 register liveness information on SMALL_REGISTER_CLASSES
8808 machines, where return registers might be used as spills:
8809 subsequent passes assume that spill registers are dead at
8810 the end of a basic block. */
8811 if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
8813 pop_obstacks ();
8814 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
8815 INSN_CODE (insn) = -1;
8816 REG_NOTES (insn) = NULL_RTX;
8817 push_obstacks (&reload_obstack, &reload_obstack);
8819 else
8821 PUT_CODE (insn, NOTE);
8822 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8823 NOTE_SOURCE_FILE (insn) = 0;
8826 /* We're done with this insn. */
8827 continue;
8830 /* It's not a no-op, but we can try to simplify it. */
8831 count += reload_cse_simplify_set (body, insn);
8833 if (count > 0)
8834 apply_change_group ();
8835 else
8836 reload_cse_simplify_operands (insn);
8838 reload_cse_record_set (body, body);
8840 else if (GET_CODE (body) == PARALLEL)
8842 int count = 0;
8843 rtx value = NULL_RTX;
8845 /* If every action in a PARALLEL is a noop, we can delete
8846 the entire PARALLEL. */
8847 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8849 rtx part = XVECEXP (body, 0, i);
8850 if (GET_CODE (part) == SET)
8852 if (! reload_cse_noop_set_p (part, insn))
8853 break;
8854 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8856 if (value)
8857 break;
8858 value = SET_DEST (part);
8861 else if (GET_CODE (part) != CLOBBER)
8862 break;
8864 if (i < 0)
8866 if (value)
8868 pop_obstacks ();
8869 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8870 INSN_CODE (insn) = -1;
8871 REG_NOTES (insn) = NULL_RTX;
8872 push_obstacks (&reload_obstack, &reload_obstack);
8874 else
8876 PUT_CODE (insn, NOTE);
8877 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8878 NOTE_SOURCE_FILE (insn) = 0;
8881 /* We're done with this insn. */
8882 continue;
8885 /* It's not a no-op, but we can try to simplify it. */
8886 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8887 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8888 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8890 if (count > 0)
8891 apply_change_group ();
8892 else
8893 reload_cse_simplify_operands (insn);
8895 /* Look through the PARALLEL and record the values being
8896 set, if possible. Also handle any CLOBBERs. */
8897 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8899 rtx x = XVECEXP (body, 0, i);
8901 if (GET_CODE (x) == SET)
8902 reload_cse_record_set (x, body);
8903 else
8904 note_stores (x, reload_cse_invalidate_rtx, NULL);
8907 else
8908 note_stores (body, reload_cse_invalidate_rtx, NULL);
8910 #ifdef AUTO_INC_DEC
8911 /* Clobber any registers which appear in REG_INC notes. We
8912 could keep track of the changes to their values, but it is
8913 unlikely to help. */
8915 rtx x;
8917 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8918 if (REG_NOTE_KIND (x) == REG_INC)
8919 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
8921 #endif
8923 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8924 after we have processed the insn. */
8925 if (GET_CODE (insn) == CALL_INSN)
8927 rtx x;
8929 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8930 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8931 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX,
8932 NULL);
8936 /* Clean up. */
8937 end_alias_analysis ();
8939 /* Free all the temporary structures we created, and go back to the
8940 regular obstacks. */
8941 obstack_free (&reload_obstack, firstobj);
8942 pop_obstacks ();
8945 /* Call cse / combine like post-reload optimization phases.
8946 FIRST is the first instruction. */
8947 void
8948 reload_cse_regs (first)
8949 rtx first;
8951 reload_cse_regs_1 (first);
8952 reload_combine ();
8953 reload_cse_move2add (first);
8954 if (flag_expensive_optimizations)
8955 reload_cse_regs_1 (first);
8958 /* Return whether the values known for REGNO are equal to VAL. MODE
8959 is the mode of the object that VAL is being copied to; this matters
8960 if VAL is a CONST_INT. */
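/* For example (a sketch; modes and register numbers are arbitrary): if
   reg_values[0] records (const_int 7) in SImode, the register can also
   satisfy a QImode use of (const_int 7), since a narrower MODE is accepted
   when truncation from the recorded mode is a no-op and, on a big endian
   machine, the register does not hold only the high part of a multi-word
   value.  */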
8962 static int
8963 reload_cse_regno_equal_p (regno, val, mode)
8964 int regno;
8965 rtx val;
8966 enum machine_mode mode;
8968 rtx x;
8970 if (val == 0)
8971 return 0;
8973 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8974 if (XEXP (x, 0) != 0
8975 && rtx_equal_p (XEXP (x, 0), val)
8976 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8977 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
8978 && (GET_CODE (val) != CONST_INT
8979 || mode == GET_MODE (x)
8980 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8981 /* On a big endian machine if the value spans more than
8982 one register then this register holds the high part of
8983 it and we can't use it.
8985 ??? We should also compare with the high part of the
8986 value. */
8987 && !(WORDS_BIG_ENDIAN
8988 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8990 GET_MODE_BITSIZE (GET_MODE (x))))))
8991 return 1;
8993 return 0;
8996 /* See whether a single set is a noop. SET is the set instruction we
8997 should check, and INSN is the instruction from which it came. */
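/* Typical no-ops this catches (hypothetical examples): a copy of a hard
   register to itself, a load (set (reg:SI 3) (mem:SI addr)) when reg 3 is
   already known to hold that memory value, and the reverse store
   (set (mem:SI addr) (reg:SI 3)) when the location is already known to be
   equal to reg 3.  */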
8999 static int
9000 reload_cse_noop_set_p (set, insn)
9001 rtx set;
9002 rtx insn ATTRIBUTE_UNUSED;
9004 rtx src, dest;
9005 enum machine_mode dest_mode;
9006 int dreg, sreg;
9007 int ret;
9009 src = SET_SRC (set);
9010 dest = SET_DEST (set);
9011 dest_mode = GET_MODE (dest);
9013 if (side_effects_p (src))
9014 return 0;
9016 dreg = true_regnum (dest);
9017 sreg = true_regnum (src);
9019 /* Check for setting a register to itself. In this case, we don't
9020 have to worry about REG_DEAD notes. */
9021 if (dreg >= 0 && dreg == sreg)
9022 return 1;
9024 ret = 0;
9025 if (dreg >= 0)
9027 /* Check for setting a register to itself. */
9028 if (dreg == sreg)
9029 ret = 1;
9031 /* Check for setting a register to a value which we already know
9032 is in the register. */
9033 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
9034 ret = 1;
9036 /* Check for setting a register DREG to another register SREG
9037 where SREG is equal to a value which is already in DREG. */
9038 else if (sreg >= 0)
9040 rtx x;
9042 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9044 rtx tmp;
9046 if (XEXP (x, 0) == 0)
9047 continue;
9049 if (dest_mode == GET_MODE (x))
9050 tmp = XEXP (x, 0);
9051 else if (GET_MODE_BITSIZE (dest_mode)
9052 < GET_MODE_BITSIZE (GET_MODE (x)))
9053 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9054 else
9055 continue;
9057 if (tmp
9058 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
9060 ret = 1;
9061 break;
9066 else if (GET_CODE (dest) == MEM)
9068 /* Check for storing a register to memory when we know that the
9069 register is equivalent to the memory location. */
9070 if (sreg >= 0
9071 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
9072 && ! side_effects_p (dest))
9073 ret = 1;
9076 return ret;
9079 /* Try to simplify a single SET instruction. SET is the set pattern.
9080 INSN is the instruction it came from.
9081 This function only handles one case: if we set a register to a value
9082 which is not a register, we try to find that value in some other register
9083 and change the set into a register copy. */
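/* A hedged example (register numbers and costs are made up): given
   (set (reg:SI 1) (const_int 100000)), if hard reg 5 is recorded as
   holding (const_int 100000), the constant is not cheaper than a register,
   and a move between the two register classes is no more expensive than an
   ordinary register-register move, the source is replaced so the insn
   becomes (set (reg:SI 1) (reg:SI 5)).  */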
9085 static int
9086 reload_cse_simplify_set (set, insn)
9087 rtx set;
9088 rtx insn;
9090 int dreg;
9091 rtx src;
9092 enum machine_mode dest_mode;
9093 enum reg_class dclass;
9094 register int i;
9096 dreg = true_regnum (SET_DEST (set));
9097 if (dreg < 0)
9098 return 0;
9100 src = SET_SRC (set);
9101 if (side_effects_p (src) || true_regnum (src) >= 0)
9102 return 0;
9104 dclass = REGNO_REG_CLASS (dreg);
9106 /* If memory loads are cheaper than register copies, don't change them. */
9107 if (GET_CODE (src) == MEM
9108 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
9109 return 0;
9111 /* If the constant is cheaper than a register, don't change it. */
9112 if (CONSTANT_P (src)
9113 && rtx_cost (src, SET) < 2)
9114 return 0;
9116 dest_mode = GET_MODE (SET_DEST (set));
9117 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9119 if (i != dreg
9120 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
9121 && reload_cse_regno_equal_p (i, src, dest_mode))
9123 int validated;
9125 /* Pop back to the real obstacks while changing the insn. */
9126 pop_obstacks ();
9128 validated = validate_change (insn, &SET_SRC (set),
9129 gen_rtx_REG (dest_mode, i), 1);
9131 /* Go back to the obstack we are using for temporary
9132 storage. */
9133 push_obstacks (&reload_obstack, &reload_obstack);
9135 if (validated)
9136 return 1;
9139 return 0;
9142 /* Try to replace operands in INSN with equivalent values that are already
9143 in registers. This can be viewed as optional reloading.
9145 For each non-register operand in the insn, see if any hard regs are
9146 known to be equivalent to that operand. Record the alternatives which
9147 can accept these hard registers. Among all alternatives, select the
9148 ones which are better or equal to the one currently matching, where
9149 "better" is in terms of '?' and '!' constraints. Among the remaining
9150 alternatives, select the one which replaces most operands with
9151 hard registers. */
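/* As a sketch (hypothetical operand and constraints): if an operand is a
   constant such as (const_int 100000) that is not cheaper than a register,
   and some hard register is already known to hold that value, then every
   alternative whose constraint letters accept that register's class is
   recorded as a candidate; among the alternatives no worse than the one
   currently matching (in terms of '?' and '!'), the one that substitutes
   the most registers wins.  */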
9153 static int
9154 reload_cse_simplify_operands (insn)
9155 rtx insn;
9157 int i,j;
9159 const char *constraints[MAX_RECOG_OPERANDS];
9161 /* Vector recording how bad an alternative is. */
9162 int *alternative_reject;
9163 /* Vector recording how many registers can be introduced by choosing
9164 this alternative. */
9165 int *alternative_nregs;
9166 /* Array of vectors recording, for each operand and each alternative,
9167 which hard register to substitute, or -1 if the operand should be
9168 left as it is. */
9169 int *op_alt_regno[MAX_RECOG_OPERANDS];
9170 /* Array of alternatives, sorted in order of decreasing desirability. */
9171 int *alternative_order;
9172 rtx reg = gen_rtx_REG (VOIDmode, -1);
9174 extract_insn (insn);
9176 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
9177 return 0;
9179 /* Figure out which alternative currently matches. */
9180 if (! constrain_operands (1))
9181 fatal_insn_not_found (insn);
9183 alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9184 alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9185 alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9186 bzero ((char *)alternative_reject, recog_data.n_alternatives * sizeof (int));
9187 bzero ((char *)alternative_nregs, recog_data.n_alternatives * sizeof (int));
9189 for (i = 0; i < recog_data.n_operands; i++)
9191 enum machine_mode mode;
9192 int regno;
9193 const char *p;
9195 op_alt_regno[i] = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9196 for (j = 0; j < recog_data.n_alternatives; j++)
9197 op_alt_regno[i][j] = -1;
9199 p = constraints[i] = recog_data.constraints[i];
9200 mode = recog_data.operand_mode[i];
9202 /* Add the reject values for each alternative given by the constraints
9203 for this operand. */
9204 j = 0;
9205 while (*p != '\0')
9207 char c = *p++;
9208 if (c == ',')
9209 j++;
9210 else if (c == '?')
9211 alternative_reject[j] += 3;
9212 else if (c == '!')
9213 alternative_reject[j] += 300;
9216 /* We won't change operands which are already registers. We
9217 also don't want to modify output operands. */
9218 regno = true_regnum (recog_data.operand[i]);
9219 if (regno >= 0
9220 || constraints[i][0] == '='
9221 || constraints[i][0] == '+')
9222 continue;
9224 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9226 int class = (int) NO_REGS;
9228 if (! reload_cse_regno_equal_p (regno, recog_data.operand[i], mode))
9229 continue;
9231 REGNO (reg) = regno;
9232 PUT_MODE (reg, mode);
9234 /* We found a register equal to this operand. Now look for all
9235 alternatives that can accept this register and have not been
9236 assigned a register they can use yet. */
9237 j = 0;
9238 p = constraints[i];
9239 for (;;)
9241 char c = *p++;
9243 switch (c)
9245 case '=': case '+': case '?':
9246 case '#': case '&': case '!':
9247 case '*': case '%':
9248 case '0': case '1': case '2': case '3': case '4':
9249 case '5': case '6': case '7': case '8': case '9':
9250 case 'm': case '<': case '>': case 'V': case 'o':
9251 case 'E': case 'F': case 'G': case 'H':
9252 case 's': case 'i': case 'n':
9253 case 'I': case 'J': case 'K': case 'L':
9254 case 'M': case 'N': case 'O': case 'P':
9255 #ifdef EXTRA_CONSTRAINT
9256 case 'Q': case 'R': case 'S': case 'T': case 'U':
9257 #endif
9258 case 'p': case 'X':
9259 /* These don't say anything we care about. */
9260 break;
9262 case 'g': case 'r':
9263 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9264 break;
9266 default:
9267 class
9268 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
9269 break;
9271 case ',': case '\0':
9272 /* See if REGNO fits this alternative, and set it up as the
9273 replacement register if we don't have one for this
9274 alternative yet and the operand being replaced is not
9275 a cheap CONST_INT. */
9276 if (op_alt_regno[i][j] == -1
9277 && reg_fits_class_p (reg, class, 0, mode)
9278 && (GET_CODE (recog_data.operand[i]) != CONST_INT
9279 || (rtx_cost (recog_data.operand[i], SET)
9280 > rtx_cost (reg, SET))))
9282 alternative_nregs[j]++;
9283 op_alt_regno[i][j] = regno;
9285 j++;
9286 break;
9289 if (c == '\0')
9290 break;
9295 /* Record all alternatives which are better or equal to the currently
9296 matching one in the alternative_order array. */
9297 for (i = j = 0; i < recog_data.n_alternatives; i++)
9298 if (alternative_reject[i] <= alternative_reject[which_alternative])
9299 alternative_order[j++] = i;
9300 recog_data.n_alternatives = j;
9302 /* Sort it. Given a small number of alternatives, a dumb algorithm
9303 won't hurt too much. */
9304 for (i = 0; i < recog_data.n_alternatives - 1; i++)
9306 int best = i;
9307 int best_reject = alternative_reject[alternative_order[i]];
9308 int best_nregs = alternative_nregs[alternative_order[i]];
9309 int tmp;
9311 for (j = i + 1; j < recog_data.n_alternatives; j++)
9313 int this_reject = alternative_reject[alternative_order[j]];
9314 int this_nregs = alternative_nregs[alternative_order[j]];
9316 if (this_reject < best_reject
9317 || (this_reject == best_reject && this_nregs > best_nregs))
9319 best = j;
9320 best_reject = this_reject;
9321 best_nregs = this_nregs;
9325 tmp = alternative_order[best];
9326 alternative_order[best] = alternative_order[i];
9327 alternative_order[i] = tmp;
9330 /* Substitute the operands as determined by op_alt_regno for the best
9331 alternative. */
9332 j = alternative_order[0];
9334 /* Pop back to the real obstacks while changing the insn. */
9335 pop_obstacks ();
9337 for (i = 0; i < recog_data.n_operands; i++)
9339 enum machine_mode mode = recog_data.operand_mode[i];
9340 if (op_alt_regno[i][j] == -1)
9341 continue;
9343 validate_change (insn, recog_data.operand_loc[i],
9344 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
9347 for (i = recog_data.n_dups - 1; i >= 0; i--)
9349 int op = recog_data.dup_num[i];
9350 enum machine_mode mode = recog_data.operand_mode[op];
9352 if (op_alt_regno[op][j] == -1)
9353 continue;
9355 validate_change (insn, recog_data.dup_loc[i],
9356 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
9359 /* Go back to the obstack we are using for temporary
9360 storage. */
9361 push_obstacks (&reload_obstack, &reload_obstack);
9363 return apply_change_group ();
9366 /* These two variables are used to pass information from
9367 reload_cse_record_set to reload_cse_check_clobber. */
9369 static int reload_cse_check_clobbered;
9370 static rtx reload_cse_check_src;
9372 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9373 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9374 second argument, which is passed by note_stores, is ignored. */
9376 static void
9377 reload_cse_check_clobber (dest, ignore, data)
9378 rtx dest;
9379 rtx ignore ATTRIBUTE_UNUSED;
9380 void *data ATTRIBUTE_UNUSED;
9382 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9383 reload_cse_check_clobbered = 1;
9386 /* Record the result of a SET instruction. SET is the set pattern.
9387 BODY is the pattern of the insn that it came from. */
9389 static void
9390 reload_cse_record_set (set, body)
9391 rtx set;
9392 rtx body;
9394 rtx dest, src, x;
9395 int dreg, sreg;
9396 enum machine_mode dest_mode;
9398 dest = SET_DEST (set);
9399 src = SET_SRC (set);
9400 dreg = true_regnum (dest);
9401 sreg = true_regnum (src);
9402 dest_mode = GET_MODE (dest);
9404 /* Some machines don't define AUTO_INC_DEC, but they still use push
9405 instructions. We need to catch that case here in order to
9406 invalidate the stack pointer correctly. Note that invalidating
9407 the stack pointer is different from invalidating DEST. */
9408 x = dest;
9409 while (GET_CODE (x) == SUBREG
9410 || GET_CODE (x) == ZERO_EXTRACT
9411 || GET_CODE (x) == SIGN_EXTRACT
9412 || GET_CODE (x) == STRICT_LOW_PART)
9413 x = XEXP (x, 0);
9414 if (push_operand (x, GET_MODE (x)))
9416 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
9417 reload_cse_invalidate_rtx (dest, NULL_RTX, NULL);
9418 return;
9421 /* We can only handle an assignment to a register, or a store of a
9422 register to a memory location. For other cases, we just clobber
9423 the destination. We also have to just clobber if there are side
9424 effects in SRC or DEST. */
9425 if ((dreg < 0 && GET_CODE (dest) != MEM)
9426 || side_effects_p (src)
9427 || side_effects_p (dest))
9429 reload_cse_invalidate_rtx (dest, NULL_RTX, NULL);
9430 return;
9433 #ifdef HAVE_cc0
9434 /* We don't try to handle values involving CC, because it's a pain
9435 to keep track of when they have to be invalidated. */
9436 if (reg_mentioned_p (cc0_rtx, src)
9437 || reg_mentioned_p (cc0_rtx, dest))
9439 reload_cse_invalidate_rtx (dest, NULL_RTX, NULL);
9440 return;
9442 #endif
9444 /* If BODY is a PARALLEL, then we need to see whether the source of
9445 SET is clobbered by some other instruction in the PARALLEL. */
9446 if (GET_CODE (body) == PARALLEL)
9448 int i;
9450 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
9452 rtx x;
9454 x = XVECEXP (body, 0, i);
9455 if (x == set)
9456 continue;
9458 reload_cse_check_clobbered = 0;
9459 reload_cse_check_src = src;
9460 note_stores (x, reload_cse_check_clobber, NULL);
9461 if (reload_cse_check_clobbered)
9463 reload_cse_invalidate_rtx (dest, NULL_RTX, NULL);
9464 return;
9469 if (dreg >= 0)
9471 int i;
9473 /* This is an assignment to a register. Update the value we
9474 have stored for the register. */
9475 if (sreg >= 0)
9477 rtx x;
9479 /* This is a copy from one register to another. Any values
9480 which were valid for SREG are now valid for DREG. If the
9481 mode changes, we use gen_lowpart_common to extract only
9482 the part of the value that is copied. */
9483 reg_values[dreg] = 0;
9484 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9486 rtx tmp;
9488 if (XEXP (x, 0) == 0)
9489 continue;
9490 if (dest_mode == GET_MODE (XEXP (x, 0)))
9491 tmp = XEXP (x, 0);
9492 else if (GET_MODE_BITSIZE (dest_mode)
9493 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
9494 continue;
9495 else
9496 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9497 if (tmp)
9498 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
9499 reg_values[dreg]);
9502 else
9503 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
9505 /* We've changed DREG, so invalidate any values held by other
9506 registers that depend upon it. */
9507 reload_cse_invalidate_regno (dreg, dest_mode, 0);
9509 /* If this assignment changes more than one hard register,
9510 forget anything we know about the others. */
9511 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
9512 reg_values[dreg + i] = 0;
9514 else if (GET_CODE (dest) == MEM)
9516 /* Invalidate conflicting memory locations. */
9517 reload_cse_invalidate_mem (dest);
9519 /* If we're storing a register to memory, add DEST to the list
9520 in REG_VALUES. */
9521 if (sreg >= 0 && ! side_effects_p (dest))
9522 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
9523 reg_values[sreg]);
9525 else
9527 /* We should have bailed out earlier. */
9528 abort ();
9532 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9533 addressing now.
9534 This code might also be useful when reload gave up on reg+reg addressing
9535 because of clashes between the return register and INDEX_REG_CLASS. */
9537 /* The maximum number of uses of a register we can keep track of to
9538 replace them with reg+reg addressing. */
9539 #define RELOAD_COMBINE_MAX_USES 6
9541 /* INSN is the insn where a register has been used, and USEP points to the
9542 location of the register within the rtl. */
9543 struct reg_use { rtx insn, *usep; };
9545 /* If the register is used in some unknown fashion, USE_INDEX is negative.
9546 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9547 indicates where it becomes live again.
9548 Otherwise, USE_INDEX is the index of the last encountered use of the
9549 register (which is the first of these uses in the insn stream, since
9550 we scan backwards), OFFSET contains the constant offset that is added
9551 to the register in all encountered uses, and USE_RUID indicates the
9552 first use we encountered, i.e. the last one in the insn stream.
9553 STORE_RUID is always meaningful if we only want to use a value in a
9554 register in a different place: it denotes the next insn in the insn
9555 stream (i.e. the last encountered) that sets or clobbers the register. */
9556 static struct
9558 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
9559 int use_index;
9560 rtx offset;
9561 int store_ruid;
9562 int use_ruid;
9563 } reg_state[FIRST_PSEUDO_REGISTER];
9565 /* Reverse linear uid. This is increased in reload_combine while scanning
9566 the instructions from last to first. It is used to set last_label_ruid
9567 and the store_ruid / use_ruid fields in reg_state. */
9568 static int reload_combine_ruid;
9570 #define LABEL_LIVE(LABEL) \
9571 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
9573 static void
9574 reload_combine ()
9576 rtx insn, set;
9577 int first_index_reg = 1, last_index_reg = 0;
9578 int i;
9579 int last_label_ruid;
9580 int min_labelno, n_labels;
9581 HARD_REG_SET ever_live_at_start, *label_live;
9583 /* If reg+reg can be used in offsettable memory addresses, the main chunk of
9584 reload has already used it where appropriate, so there is no use in
9585 trying to generate it now. */
9586 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
9587 return;
9589 /* To avoid wasting too much time later searching for an index register,
9590 determine the minimum and maximum index register numbers. */
9591 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9593 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
9595 if (! last_index_reg)
9596 last_index_reg = i;
9597 first_index_reg = i;
9600 /* If no index register is available, we can quit now. */
9601 if (first_index_reg > last_index_reg)
9602 return;
9604 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
9605 information is a bit fuzzy immediately after reload, but it's
9606 still good enough to determine which registers are live at a jump
9607 destination. */
9608 min_labelno = get_first_label_num ();
9609 n_labels = max_label_num () - min_labelno;
9610 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
9611 CLEAR_HARD_REG_SET (ever_live_at_start);
9612 for (i = n_basic_blocks - 1; i >= 0; i--)
9614 insn = BLOCK_HEAD (i);
9615 if (GET_CODE (insn) == CODE_LABEL)
9617 HARD_REG_SET live;
9619 REG_SET_TO_HARD_REG_SET (live, BASIC_BLOCK (i)->global_live_at_start);
9620 compute_use_by_pseudos (&live, BASIC_BLOCK (i)->global_live_at_start);
9621 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
9622 IOR_HARD_REG_SET (ever_live_at_start, live);
9626 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9627 last_label_ruid = reload_combine_ruid = 0;
9628 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9630 reg_state[i].store_ruid = reload_combine_ruid;
9631 if (fixed_regs[i])
9632 reg_state[i].use_index = -1;
9633 else
9634 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9637 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
9639 rtx note;
9641 /* We cannot do our optimization across labels. Invalidating all the use
9642 information we have would be costly, so we just note where the label
9643 is and then later disable any optimization that would cross it. */
9644 if (GET_CODE (insn) == CODE_LABEL)
9645 last_label_ruid = reload_combine_ruid;
9646 if (GET_CODE (insn) == BARRIER)
9648 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9649 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9651 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9652 continue;
9653 reload_combine_ruid++;
9655 /* Look for (set (REGX) (CONST_INT))
9656 (set (REGX) (PLUS (REGX) (REGY)))
9658 ... (MEM (REGX)) ...
9659 and convert it to
9660 (set (REGZ) (CONST_INT))
9662 ... (MEM (PLUS (REGZ) (REGY)))... .
9664 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9665 and that we know all uses of REGX before it dies. */
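	 /* A concrete, hypothetical instance of the pattern above
	    (register numbers are arbitrary):

		(set (reg:SI 0) (const_int 4))
		(set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 6)))
		... (mem:SI (reg:SI 0)) ...

	    may become, if reg 1 is a suitable free index register,

		(set (reg:SI 1) (const_int 4))
		... (mem:SI (plus:SI (reg:SI 1) (reg:SI 6))) ...

	    so that the separate addition can be deleted.  */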
9666 set = single_set (insn);
9667 if (set != NULL_RTX
9668 && GET_CODE (SET_DEST (set)) == REG
9669 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
9670 GET_MODE (SET_DEST (set)))
9671 == 1)
9672 && GET_CODE (SET_SRC (set)) == PLUS
9673 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
9674 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
9675 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
9677 rtx reg = SET_DEST (set);
9678 rtx plus = SET_SRC (set);
9679 rtx base = XEXP (plus, 1);
9680 rtx prev = prev_nonnote_insn (insn);
9681 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
9682 int regno = REGNO (reg);
9683 rtx const_reg = NULL_RTX;
9684 rtx reg_sum = NULL_RTX;
9686 /* Now, we need an index register.
9687 We'll set index_reg to this index register, const_reg to the
9688 register that is to be loaded with the constant
9689 (denoted as REGZ in the substitution illustration above),
9690 and reg_sum to the register-register that we want to use to
9691 substitute uses of REG (typically in MEMs) with.
9692 First check REG and BASE for being index registers;
9693 we can use them even if they are not dead. */
9694 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
9695 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
9696 REGNO (base)))
9698 const_reg = reg;
9699 reg_sum = plus;
9701 else
9703 /* Otherwise, look for a free index register. Since we have
9704 checked above that neither REG nor BASE is an index register,
9705 if we find anything at all, it will be different from these
9706 two registers. */
9707 for (i = first_index_reg; i <= last_index_reg; i++)
9709 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
9710 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
9711 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
9712 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
9714 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
9715 const_reg = index_reg;
9716 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
9717 break;
9721 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
9722 (REGY), i.e. BASE, is not clobbered before the last use we'll
9723 create. */
9724 if (prev_set
9725 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
9726 && rtx_equal_p (SET_DEST (prev_set), reg)
9727 && reg_state[regno].use_index >= 0
9728 && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
9729 && reg_sum)
9731 int i;
9733 /* Change destination register and - if necessary - the
9734 constant value in PREV, the constant loading instruction. */
9735 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
9736 if (reg_state[regno].offset != const0_rtx)
9737 validate_change (prev,
9738 &SET_SRC (prev_set),
9739 GEN_INT (INTVAL (SET_SRC (prev_set))
9740 + INTVAL (reg_state[regno].offset)),
9741 1);
9742 /* Now for every use of REG that we have recorded, replace REG
9743 with REG_SUM. */
9744 for (i = reg_state[regno].use_index;
9745 i < RELOAD_COMBINE_MAX_USES; i++)
9746 validate_change (reg_state[regno].reg_use[i].insn,
9747 reg_state[regno].reg_use[i].usep,
9748 reg_sum, 1);
9750 if (apply_change_group ())
9752 rtx *np;
9754 /* Delete the reg-reg addition. */
9755 PUT_CODE (insn, NOTE);
9756 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9757 NOTE_SOURCE_FILE (insn) = 0;
9759 if (reg_state[regno].offset != const0_rtx)
9761 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9762 are now invalid. */
9763 for (np = &REG_NOTES (prev); *np; )
9765 if (REG_NOTE_KIND (*np) == REG_EQUAL
9766 || REG_NOTE_KIND (*np) == REG_EQUIV)
9767 *np = XEXP (*np, 1);
9768 else
9769 np = &XEXP (*np, 1);
9772 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9773 reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
9774 continue;
9778 note_stores (PATTERN (insn), reload_combine_note_store, NULL);
9779 if (GET_CODE (insn) == CALL_INSN)
9781 rtx link;
9783 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9785 if (call_used_regs[i])
9787 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9788 reg_state[i].store_ruid = reload_combine_ruid;
9791 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
9792 link = XEXP (link, 1))
9794 rtx use = XEXP (link, 0);
9795 int regno = REGNO (XEXP (use, 0));
9796 if (GET_CODE (use) == CLOBBER)
9798 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9799 reg_state[regno].store_ruid = reload_combine_ruid;
9801 else
9802 reg_state[regno].use_index = -1;
9805 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
9807 /* Non-spill registers might be used at the jump destination in
9808 some unknown fashion, so we have to mark the unknown use. */
9809 HARD_REG_SET *live;
9810 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
9811 && JUMP_LABEL (insn))
9812 live = &LABEL_LIVE (JUMP_LABEL (insn));
9813 else
9814 live = &ever_live_at_start;
9815 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9817 if (TEST_HARD_REG_BIT (*live, i))
9818 reg_state[i].use_index = -1;
9821 reload_combine_note_use (&PATTERN (insn), insn);
9822 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9824 if (REG_NOTE_KIND (note) == REG_INC
9825 && GET_CODE (XEXP (note, 0)) == REG)
9827 int regno = REGNO (XEXP (note, 0));
9829 reg_state[regno].store_ruid = reload_combine_ruid;
9830 reg_state[regno].use_index = -1;
9834 free (label_live);
9837 /* Check if DST is a register or a subreg of a register; if it is,
9838 update reg_state[regno].store_ruid and reg_state[regno].use_index
9839 accordingly. Called via note_stores from reload_combine. */
9840 static void
9841 reload_combine_note_store (dst, set, data)
9842 rtx dst, set;
9843 void *data ATTRIBUTE_UNUSED;
9845 int regno = 0;
9846 int i;
9847 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9849 if (GET_CODE (dst) == SUBREG)
9851 regno = SUBREG_WORD (dst);
9852 dst = SUBREG_REG (dst);
9854 if (GET_CODE (dst) != REG)
9855 return;
9856 regno += REGNO (dst);
9858 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9859 careful with registers / register parts that are not full words.
9861 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
9862 if (GET_CODE (set) != SET
9863 || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
9864 || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
9865 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
9867 for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
9869 reg_state[i].use_index = -1;
9870 reg_state[i].store_ruid = reload_combine_ruid;
9873 else
9875 for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
9877 reg_state[i].store_ruid = reload_combine_ruid;
9878 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9883 /* XP points to a piece of rtl that has to be checked for any uses of
9884 registers.
9885 *XP is the pattern of INSN, or a part of it.
9886 Called from reload_combine, and recursively by itself. */
9887 static void
9888 reload_combine_note_use (xp, insn)
9889 rtx *xp, insn;
9891 rtx x = *xp;
9892 enum rtx_code code = x->code;
9893 const char *fmt;
9894 int i, j;
9895 rtx offset = const0_rtx; /* For the REG case below. */
9897 switch (code)
9899 case SET:
9900 if (GET_CODE (SET_DEST (x)) == REG)
9902 reload_combine_note_use (&SET_SRC (x), insn);
9903 return;
9905 break;
9907 case CLOBBER:
9908 if (GET_CODE (SET_DEST (x)) == REG)
9909 return;
9910 break;
9912 case PLUS:
9913 /* We are interested in (plus (reg) (const_int)) . */
9914 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9915 break;
9916 offset = XEXP (x, 1);
9917 x = XEXP (x, 0);
9918 /* Fall through. */
9919 case REG:
9921 int regno = REGNO (x);
9922 int use_index;
9924 /* Some spurious USEs of pseudo registers might remain.
9925 Just ignore them. */
9926 if (regno >= FIRST_PSEUDO_REGISTER)
9927 return;
9929 /* If this register is already used in some unknown fashion, we
9930 can't do anything.
9931 If we decrement the index from zero to -1, we can't store more
9932 uses, so this register becomes used in an unknown fashion. */
9933 use_index = --reg_state[regno].use_index;
9934 if (use_index < 0)
9935 return;
9937 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9939 /* We have found another use for a register that is already
9940 used later. Check if the offsets match; if not, mark the
9941 register as used in an unknown fashion. */
9942 if (! rtx_equal_p (offset, reg_state[regno].offset))
9944 reg_state[regno].use_index = -1;
9945 return;
9948 else
9950 /* This is the first use of this register we have seen since we
9951 marked it as dead. */
9952 reg_state[regno].offset = offset;
9953 reg_state[regno].use_ruid = reload_combine_ruid;
9955 reg_state[regno].reg_use[use_index].insn = insn;
9956 reg_state[regno].reg_use[use_index].usep = xp;
9957 return;
9960 default:
9961 break;
9964 /* Recursively process the components of X. */
9965 fmt = GET_RTX_FORMAT (code);
9966 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9968 if (fmt[i] == 'e')
9969 reload_combine_note_use (&XEXP (x, i), insn);
9970 else if (fmt[i] == 'E')
9972 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9973 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9978 /* See if we can reduce the cost of a constant by replacing a move with
9979 an add. */
9980 /* We cannot do our optimization across labels. Invalidating all the
9981 information about register contents we have would be costly, so we
9982 use last_label_luid (local variable of reload_cse_move2add) to note
9983 where the label is and then later disable any optimization that would
9984 cross it.
9985 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9986 reg_set_luid[n] is larger than last_label_luid .
9987 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9988 /* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9989 reg_mode[n] to be valid.
9990 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9991 has been set to reg_offset[n] in mode reg_mode[n] .
9992 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9993 register n has been set to the sum of reg_offset[n] and register
9994 reg_base_reg[n], calculated in mode reg_mode[n] . */
9995 static rtx reg_offset[FIRST_PSEUDO_REGISTER];
9996 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9997 static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9998 /* move2add_luid is linearly increased while scanning the instructions
9999 from first to last. It is used to set reg_set_luid in
10000 reload_cse_move2add and move2add_note_store. */
10001 static int move2add_luid;
10003 /* Generate a CONST_INT and force it in the range of MODE. */
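/* For instance (assuming an 8-bit QImode and a wider HOST_WIDE_INT):
   gen_mode_int (QImode, 0x1ff) first masks the value to 0xff and, since
   bit 7 is then set, sign extends it, yielding (const_int -1).  */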
10004 static rtx
10005 gen_mode_int (mode, value)
10006 enum machine_mode mode;
10007 HOST_WIDE_INT value;
10009 HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
10010 int width = GET_MODE_BITSIZE (mode);
10012 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
10013 sign extend it. */
10014 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
10015 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
10016 cval |= (HOST_WIDE_INT) -1 << width;
10018 return GEN_INT (cval);
10021 static void
10022 reload_cse_move2add (first)
10023 rtx first;
10025 int i;
10026 rtx insn;
10027 int last_label_luid;
10029 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
10030 reg_set_luid[i] = 0;
10032 last_label_luid = 0;
10033 move2add_luid = 1;
10034 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
10036 rtx pat, note;
10038 if (GET_CODE (insn) == CODE_LABEL)
10039 last_label_luid = move2add_luid;
10040 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
10041 continue;
10042 pat = PATTERN (insn);
10043 /* For simplicity, we only perform this optimization on
10044 straightforward SETs. */
10045 if (GET_CODE (pat) == SET
10046 && GET_CODE (SET_DEST (pat)) == REG)
10048 rtx reg = SET_DEST (pat);
10049 int regno = REGNO (reg);
10050 rtx src = SET_SRC (pat);
10052 /* Check if we have valid information on the contents of this
10053 register in the mode of REG. */
10054 /* ??? We don't know how zero / sign extension is handled, hence
10055 we can't go from a narrower to a wider mode. */
10056 if (reg_set_luid[regno] > last_label_luid
10057 && (GET_MODE_SIZE (GET_MODE (reg))
10058 <= GET_MODE_SIZE (reg_mode[regno]))
10059 && GET_CODE (reg_offset[regno]) == CONST_INT)
10061 /* Try to transform (set (REGX) (CONST_INT A))
10062 ...
10063 (set (REGX) (CONST_INT B))
10064 to
10065 (set (REGX) (CONST_INT A))
10066 ...
10067 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
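	      /* Concretely (hypothetical values): a (const_int 100) load
		 followed later by a (const_int 104) load into the same
		 register can become an addition of (const_int 4), provided
		 the add is cheaper than the second constant load and an
		 add2 pattern exists for the mode.  */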
10069 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
10071 int success = 0;
10072 rtx new_src
10073 = gen_mode_int (GET_MODE (reg),
10074 INTVAL (src) - INTVAL (reg_offset[regno]));
10075 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
10076 use (set (reg) (reg)) instead.
10077 We don't delete this insn, nor do we convert it into a
10078 note, to avoid losing register notes or the return
10079 value flag. jump2 already knows how to get rid of
10080 no-op moves. */
10081 if (new_src == const0_rtx)
10082 success = validate_change (insn, &SET_SRC (pat), reg, 0);
10083 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
10084 && have_add2_insn (GET_MODE (reg)))
10085 success = validate_change (insn, &PATTERN (insn),
10086 gen_add2_insn (reg, new_src), 0);
10087 reg_set_luid[regno] = move2add_luid;
10088 reg_mode[regno] = GET_MODE (reg);
10089 reg_offset[regno] = src;
10090 continue;
10093 /* Try to transform (set (REGX) (REGY))
10094 (set (REGX) (PLUS (REGX) (CONST_INT A)))
10095 ...
10096 (set (REGX) (REGY))
10097 (set (REGX) (PLUS (REGX) (CONST_INT B)))
10098 to
10099 (set (REGX) (REGY))
10100 (set (REGX) (PLUS (REGX) (CONST_INT A)))
10101 ...
10102 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
10103 else if (GET_CODE (src) == REG
10104 && reg_base_reg[regno] == REGNO (src)
10105 && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
10107 rtx next = next_nonnote_insn (insn);
10108 rtx set = NULL_RTX;
10109 if (next)
10110 set = single_set (next);
10111 if (next
10112 && set
10113 && SET_DEST (set) == reg
10114 && GET_CODE (SET_SRC (set)) == PLUS
10115 && XEXP (SET_SRC (set), 0) == reg
10116 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
10118 rtx src3 = XEXP (SET_SRC (set), 1);
10119 rtx new_src
10120 = gen_mode_int (GET_MODE (reg),
10121 INTVAL (src3)
10122 - INTVAL (reg_offset[regno]));
10123 int success = 0;
10125 if (new_src == const0_rtx)
10126 /* See above why we create (set (reg) (reg)) here. */
10127 success
10128 = validate_change (next, &SET_SRC (set), reg, 0);
10129 else if ((rtx_cost (new_src, PLUS)
10130 < 2 + rtx_cost (src3, SET))
10131 && have_add2_insn (GET_MODE (reg)))
10132 success
10133 = validate_change (next, &PATTERN (next),
10134 gen_add2_insn (reg, new_src), 0);
10135 if (success)
10137 /* INSN might be the first insn in a basic block
10138 if the preceding insn is a conditional jump
10139 or a possible-throwing call. */
10140 PUT_CODE (insn, NOTE);
10141 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
10142 NOTE_SOURCE_FILE (insn) = 0;
10144 insn = next;
10145 reg_set_luid[regno] = move2add_luid;
10146 reg_mode[regno] = GET_MODE (reg);
10147 reg_offset[regno] = src3;
10148 continue;
10154 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
10156 if (REG_NOTE_KIND (note) == REG_INC
10157 && GET_CODE (XEXP (note, 0)) == REG)
10159 /* Indicate that this register has been recently written to,
10160 but the exact contents are not available. */
10161 int regno = REGNO (XEXP (note, 0));
10162 if (regno < FIRST_PSEUDO_REGISTER)
10164 reg_set_luid[regno] = move2add_luid;
10165 reg_offset[regno] = note;
10169 note_stores (PATTERN (insn), move2add_note_store, NULL);
10170 /* If this is a CALL_INSN, all call used registers are stored with
10171 unknown values. */
10172 if (GET_CODE (insn) == CALL_INSN)
10174 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
10176 if (call_used_regs[i])
10178 reg_set_luid[i] = move2add_luid;
10179 reg_offset[i] = insn; /* Invalidate contents. */
10186 /* SET is a SET or CLOBBER that sets DST.
10187 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
10188 Called from reload_cse_move2add via note_stores. */
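/* For example (register numbers are arbitrary): recording the effect of
   (set (reg:SI 3) (plus:SI (reg:SI 5) (const_int 4))) notes that reg 3
   now equals reg 5 plus 4 in SImode, while a SET whose source is neither
   a register, a constant, nor such a PLUS simply marks reg 3 as recently
   set with unknown contents.  */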
10189 static void
10190 move2add_note_store (dst, set, data)
10191 rtx dst, set;
10192 void *data ATTRIBUTE_UNUSED;
10194 int regno = 0;
10195 int i;
10197 enum machine_mode mode = GET_MODE (dst);
10198 if (GET_CODE (dst) == SUBREG)
10200 regno = SUBREG_WORD (dst);
10201 dst = SUBREG_REG (dst);
10203 if (GET_CODE (dst) != REG)
10204 return;
10206 regno += REGNO (dst);
10208 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
10209 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
10210 && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
10211 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
10213 rtx src = SET_SRC (set);
10215 reg_mode[regno] = mode;
10216 switch (GET_CODE (src))
10218 case PLUS:
10220 rtx src0 = XEXP (src, 0);
10221 if (GET_CODE (src0) == REG)
10223 if (REGNO (src0) != regno
10224 || reg_offset[regno] != const0_rtx)
10226 reg_base_reg[regno] = REGNO (src0);
10227 reg_set_luid[regno] = move2add_luid;
10229 reg_offset[regno] = XEXP (src, 1);
10230 break;
10232 reg_set_luid[regno] = move2add_luid;
10233 reg_offset[regno] = set; /* Invalidate contents. */
10234 break;
10237 case REG:
10238 reg_base_reg[regno] = REGNO (SET_SRC (set));
10239 reg_offset[regno] = const0_rtx;
10240 reg_set_luid[regno] = move2add_luid;
10241 break;
10243 default:
10244 reg_base_reg[regno] = -1;
10245 reg_offset[regno] = SET_SRC (set);
10246 reg_set_luid[regno] = move2add_luid;
10247 break;
10250 else
10252 for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
10254 /* Indicate that this register has been recently written to,
10255 but the exact contents are not available. */
10256 reg_set_luid[i] = move2add_luid;
10257 reg_offset[i] = dst;
10262 #ifdef AUTO_INC_DEC
10263 static void
10264 add_auto_inc_notes (insn, x)
10265 rtx insn;
10266 rtx x;
10268 enum rtx_code code = GET_CODE (x);
10269 const char *fmt;
10270 int i, j;
10272 if (code == MEM && auto_inc_p (XEXP (x, 0)))
10274 REG_NOTES (insn)
10275 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
10276 return;
10279 /* Scan all the operand sub-expressions. */
10280 fmt = GET_RTX_FORMAT (code);
10281 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10283 if (fmt[i] == 'e')
10284 add_auto_inc_notes (insn, XEXP (x, i));
10285 else if (fmt[i] == 'E')
10286 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10287 add_auto_inc_notes (insn, XVECEXP (x, i, j));
10290 #endif