[official-gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "obstack.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "expr.h"
34 #include "regs.h"
35 #include "basic-block.h"
36 #include "reload.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "real.h"
40 #include "toplev.h"
42 /* This file contains the reload pass of the compiler, which is
43 run after register allocation has been done. It checks that
44 each insn is valid (operands required to be in registers really
45 are in registers of the proper class) and fixes up invalid ones
46 by copying values temporarily into registers for the insns
47 that need them.
49 The results of register allocation are described by the vector
50 reg_renumber; the insns still contain pseudo regs, but reg_renumber
51 can be used to find which hard reg, if any, a pseudo reg is in.
53 The technique we always use is to free up a few hard regs that are
54 called ``reload regs'', and for each place where a pseudo reg
55 must be in a hard reg, copy it temporarily into one of the reload regs.
57 All the pseudos that were formerly allocated to the hard regs that
58 are now in use as reload regs must be ``spilled''. This means
59 that they go to other hard regs, or to stack slots if no other
60 available hard regs can be found. Spilling can invalidate more
61 insns, creating additional reload needs, so we must keep checking
62 until the process stabilizes.
64 For machines with different classes of registers, we must keep track
65 of the register class needed for each reload, and make sure that
66 we allocate enough reload registers of each class.
68 The file reload.c contains the code that checks one insn for
69 validity and reports the reloads that it needs. This file
70 is in charge of scanning the entire rtl code, accumulating the
71 reload needs, spilling, assigning reload registers to use for
72 fixing up each insn, and generating the new insns to copy values
73 into the reload registers. */
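/* A minimal illustrative sketch (not part of the reload pass itself) of the
   iterate-until-stable structure described above.  The helpers here are
   hypothetical stand-ins for the scanning and spilling code that appears
   later in this file.  */

static int
sketch_scan_insns_for_needs ()
{
  /* Stand-in: the real pass returns nonzero when more hard regs must be
     spilled to satisfy the reload needs it collected.  */
  return 0;
}

static int
sketch_spill_more_hard_regs ()
{
  /* Stand-in: the real pass returns nonzero when spilling displaced a
     pseudo, which can in turn create new reload needs.  */
  return 0;
}

static void
sketch_reload_driver ()
{
  int something_changed = 1;

  /* Repeat until one full pass over the function changes nothing.  */
  while (something_changed)
    {
      something_changed = sketch_scan_insns_for_needs ();
      something_changed |= sketch_spill_more_hard_regs ();
    }
}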
76 #ifndef REGISTER_MOVE_COST
77 #define REGISTER_MOVE_COST(x, y) 2
78 #endif
80 /* During reload_as_needed, element N contains a REG rtx for the hard reg
81 into which reg N has been reloaded (perhaps for a previous insn). */
82 static rtx *reg_last_reload_reg;
84 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
85 for an output reload that stores into reg N. */
86 static char *reg_has_output_reload;
88 /* Indicates which hard regs are reload-registers for an output reload
89 in the current insn. */
90 static HARD_REG_SET reg_is_output_reload;
92 /* Element N is the constant value to which pseudo reg N is equivalent,
93 or zero if pseudo reg N is not equivalent to a constant.
94 find_reloads looks at this in order to replace pseudo reg N
95 with the constant it stands for. */
96 rtx *reg_equiv_constant;
98 /* Element N is a memory location to which pseudo reg N is equivalent,
99 prior to any register elimination (such as frame pointer to stack
100 pointer). Depending on whether or not it is a valid address, this value
101 is transferred to either reg_equiv_address or reg_equiv_mem. */
102 rtx *reg_equiv_memory_loc;
104 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
105 This is used when the address is not valid as a memory address
106 (because its displacement is too big for the machine.) */
107 rtx *reg_equiv_address;
109 /* Element N is the memory slot to which pseudo reg N is equivalent,
110 or zero if pseudo reg N is not equivalent to a memory slot. */
111 rtx *reg_equiv_mem;
113 /* Widest width in which each pseudo reg is referred to (via subreg). */
114 static int *reg_max_ref_width;
116 /* Element N is the insn that initialized reg N from its equivalent
117 constant or memory slot. */
118 static rtx *reg_equiv_init;
120 /* During reload_as_needed, element N contains the last pseudo regno reloaded
121 into hard register N. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 hard register N was last used. Its contents are significant only
128 when reg_reloaded_valid is set for this register. */
129 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131 /* Indicate whether reg_reloaded_insn / reg_reloaded_contents are valid.  */
132 static HARD_REG_SET reg_reloaded_valid;
133 /* Indicate if the register was dead at the end of the reload.
134 This is only valid if reg_reloaded_contents is set and valid. */
135 static HARD_REG_SET reg_reloaded_dead;
137 /* Number of spill-regs so far; number of valid elements of spill_regs. */
138 static int n_spills;
140 /* In parallel with spill_regs, contains REG rtx's for those regs.
141 Holds the last rtx used for any given reg, or 0 if it has never
142 been used for spilling yet. This rtx is reused, provided it has
143 the proper mode. */
144 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
146 /* In parallel with spill_regs, contains nonzero for a spill reg
147 that was stored after the last time it was used.
148 The precise value is the insn generated to do the store. */
149 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
151 /* This table is the inverse mapping of spill_regs:
152 indexed by hard reg number,
153 it contains the position of that reg in spill_regs,
154 or -1 for something that is not in spill_regs. */
155 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
157 /* This reg set indicates registers that may not be used for retrying global
158 allocation. The registers that may not be used include all spill registers
159 and the frame pointer (if we are using one). */
160 HARD_REG_SET forbidden_regs;
162 /* This reg set indicates registers that are not good for spill registers.
163 They will not be used to complete groups of spill registers. This includes
164 all fixed registers, registers that may be eliminated, and, if
165 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
167 (spill_reg_order prevents these registers from being used to start a
168 group.) */
169 static HARD_REG_SET bad_spill_regs;
171 /* Describes order of use of registers for reloading
172 of spilled pseudo-registers. `spills' is the number of
173 elements that are actually valid; new ones are added at the end. */
174 static short spill_regs[FIRST_PSEUDO_REGISTER];
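/* Illustrative sketch only (hypothetical helper): how spill_reg_order is
   kept as the inverse of spill_regs.  The real updates are scattered through
   this file; this just shows the relationship between the two tables.  */

static void
sketch_rebuild_spill_reg_order ()
{
  int i;

  /* Every hard reg starts out marked "not a spill reg".  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* For each spill reg, record its position in spill_regs.  */
  for (i = 0; i < n_spills; i++)
    spill_reg_order[spill_regs[i]] = i;
}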
176 /* This reg set indicates those registers that have been used as spill
177 registers. This information is used in reorg.c, to help figure out
178 what registers are live at any point. It is assumed that all spill_regs
179 are dead at every CODE_LABEL. */
180 HARD_REG_SET used_spill_regs;
182 /* Index of last register assigned as a spill register. We allocate in
183 a round-robin fashion. */
184 static int last_spill_reg;
186 /* Describes order of preference for putting regs into spill_regs.
187 Contains the numbers of all the hard regs, in order most preferred first.
188 This order is different for each function.
189 It is set up by order_regs_for_reload.
190 Empty elements at the end contain -1. */
191 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
193 /* 1 for a hard register that appears explicitly in the rtl
194 (for example, function value registers, special registers
195 used by insns, structure value pointer registers). */
196 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
198 /* Indicates if a register was counted against the need for
199 groups. 0 means it can count against max_nongroup instead. */
200 static HARD_REG_SET counted_for_groups;
202 /* Indicates if a register was counted against the need for
203 non-groups. 0 means it can become part of a new group.
204 During choose_reload_regs, 1 here means don't use this reg
205 as part of a group, even if it seems to be otherwise ok. */
206 static HARD_REG_SET counted_for_nongroups;
208 /* Nonzero if indirect addressing is supported on the machine; this means
209 that spilling (REG n) does not require reloading it into a register in
210 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
211 value indicates the level of indirect addressing supported, e.g., two
212 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
213 a hard register. */
214 static char spill_indirect_levels;
216 /* Nonzero if indirect addressing is supported when the innermost MEM is
217 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
218 which these are valid is the same as spill_indirect_levels, above. */
219 char indirect_symref_ok;
221 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
222 char double_reg_address_ok;
224 /* Record the stack slot for each spilled hard register. */
225 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
227 /* Width allocated so far for that stack slot. */
228 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
230 /* Record which pseudos needed to be spilled. */
231 static regset spilled_pseudos;
233 /* Indexed by register class and basic block number, nonzero if there is
234 any need for a spill register of that class in that basic block.
235 The pointer is 0 if we did stupid allocation and don't know
236 the structure of basic blocks. */
237 char *basic_block_needs[N_REG_CLASSES];
239 /* First uid used by insns created by reload in this function.
240 Used in find_equiv_reg. */
241 int reload_first_uid;
243 /* Flag set by local-alloc or global-alloc if anything is live in
244 a call-clobbered reg across calls. */
245 int caller_save_needed;
247 /* The register class to use for a base register when reloading an
248 address. This is normally BASE_REG_CLASS, but it may be different
249 when using SMALL_REGISTER_CLASSES and passing parameters in
250 registers. */
251 enum reg_class reload_address_base_reg_class;
253 /* The register class to use for an index register when reloading an
254 address. This is normally INDEX_REG_CLASS, but it may be different
255 when using SMALL_REGISTER_CLASSES and passing parameters in
256 registers. */
257 enum reg_class reload_address_index_reg_class;
259 /* Set to 1 while reload_as_needed is operating.
260 Required by some machines to handle any generated moves differently. */
262 int reload_in_progress = 0;
264 /* These arrays record the insn_code of insns that may be needed to
265 perform input and output reloads of special objects. They provide a
266 place to pass a scratch register. */
268 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
269 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
271 /* This obstack is used for allocation of rtl during register elimination.
272 The allocated storage can be freed once find_reloads has processed the
273 insn. */
275 struct obstack reload_obstack;
277 /* Points to the beginning of the reload_obstack. All insn_chain structures
278 are allocated first. */
279 char *reload_startobj;
281 /* The point after all insn_chain structures. Used to quickly deallocate
282 memory used while processing one insn. */
283 char *reload_firstobj;
285 #define obstack_chunk_alloc xmalloc
286 #define obstack_chunk_free free
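/* Illustrative sketch only (hypothetical helper): the mark-and-release
   pattern used with reload_obstack.  A zero-length allocation records the
   current position, and obstack_free later releases everything allocated
   past that mark; this is how temporary rtl from one insn's processing is
   discarded (see the uses of reload_firstobj below).  */

static void
sketch_process_one_insn ()
{
  char *mark = (char *) obstack_alloc (&reload_obstack, 0);

  /* ... allocate temporary rtl on reload_obstack here ... */

  obstack_free (&reload_obstack, mark);
}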
288 /* List of labels that must never be deleted. */
289 extern rtx forced_labels;
291 /* List of insn_chain instructions, one for every insn that reload needs to
292 examine. */
293 struct insn_chain *reload_insn_chain;
295 /* List of insns needing reloads. */
296 static struct insn_chain *insns_need_reload;
298 /* This structure is used to record information about register eliminations.
299 Each array entry describes one possible way of eliminating a register
300 in favor of another. If there is more than one way of eliminating a
301 particular register, the most preferred should be specified first. */
303 static struct elim_table
305 int from; /* Register number to be eliminated. */
306 int to; /* Register number used as replacement. */
307 int initial_offset; /* Initial difference between values. */
308 int can_eliminate; /* Non-zero if this elimination can be done. */
309 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
310 insns made by reload. */
311 int offset; /* Current offset between the two regs. */
312 int max_offset; /* Maximum offset between the two regs. */
313 int previous_offset; /* Offset at end of previous insn. */
314 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
315 rtx from_rtx; /* REG rtx for the register to be eliminated.
316 We cannot simply compare the number since
317 we might then spuriously replace a hard
318 register corresponding to a pseudo
319 assigned to the reg to be eliminated. */
320 rtx to_rtx; /* REG rtx for the replacement. */
321 } reg_eliminate[] =
323 /* If a set of eliminable registers was specified, define the table from it.
324 Otherwise, default to the normal case of the frame pointer being
325 replaced by the stack pointer. */
327 #ifdef ELIMINABLE_REGS
328 ELIMINABLE_REGS;
329 #else
330 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
331 #endif
333 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
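/* For illustration only (hypothetical values, not a definition used here):
   a target with separate argument and frame pointers might provide

	#define ELIMINABLE_REGS					\
	 {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	  { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
	  { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   Each pair is one candidate elimination, most preferred first, exactly as
   described for reg_eliminate above.  */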
335 /* Record the number of pending eliminations that have an offset not equal
336 to their initial offset. If non-zero, we use a new copy of each
337 replacement result in any insns encountered. */
338 static int num_not_at_initial_offset;
340 /* Count the number of registers that we may be able to eliminate. */
341 static int num_eliminable;
343 /* For each label, we record the offset of each elimination. If we reach
344 a label by more than one path and an offset differs, we cannot do the
345 elimination. This information is indexed by the number of the label.
346 The first table is an array of flags that records whether we have yet
347 encountered a label and the second table is an array of arrays, one
348 entry in the latter array for each elimination. */
350 static char *offsets_known_at;
351 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
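/* Illustrative sketch only (hypothetical helper): how the two tables above
   are consulted.  Both are indexed directly by label number after being
   biased by get_first_label_num () when they are allocated in reload.  */

static int
sketch_label_offset (label_num, elim_index)
     int label_num, elim_index;
{
  if (! offsets_known_at[label_num])
    return 0;			/* Label not reached by any path yet.  */

  return offsets_at[label_num][elim_index];
}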
353 /* Number of labels in the current function. */
355 static int num_labels;
357 struct hard_reg_n_uses { int regno; int uses; };
359 static void dump_needs PROTO((FILE *));
360 static void maybe_fix_stack_asms PROTO((void));
361 static int calculate_needs_all_insns PROTO((int));
362 static int calculate_needs PROTO((struct insn_chain *, rtx, int));
363 static int find_reload_regs PROTO((int, FILE *));
364 static int find_tworeg_group PROTO((int, int, FILE *));
365 static int find_group PROTO((int, int, FILE *));
366 static int possible_group_p PROTO((int, int *));
367 static void count_possible_groups PROTO((int *, enum machine_mode *,
368 int *, int));
369 static int modes_equiv_for_class_p PROTO((enum machine_mode,
370 enum machine_mode,
371 enum reg_class));
372 static void delete_caller_save_insns PROTO((void));
373 static void spill_failure PROTO((rtx));
374 static int new_spill_reg PROTO((int, int, int *, int *, int,
375 FILE *));
376 static void delete_dead_insn PROTO((rtx));
377 static void alter_reg PROTO((int, int));
378 static void set_label_offsets PROTO((rtx, rtx, int));
379 static int eliminate_regs_in_insn PROTO((rtx, int));
380 static void mark_not_eliminable PROTO((rtx, rtx));
381 static void set_initial_elim_offsets PROTO((void));
382 static void init_elim_table PROTO((void));
383 static void update_eliminables PROTO((HARD_REG_SET *));
384 static int spill_hard_reg PROTO((int, int, FILE *, int));
385 static void finish_spills PROTO((int, FILE *));
386 static void scan_paradoxical_subregs PROTO((rtx));
387 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
388 static void order_regs_for_reload PROTO((void));
389 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
390 static void reload_as_needed PROTO((int));
391 static void forget_old_reloads_1 PROTO((rtx, rtx));
392 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
393 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
394 enum machine_mode));
395 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
396 enum machine_mode));
397 static int reload_reg_free_p PROTO((int, int, enum reload_type));
398 static int reload_reg_free_before_p PROTO((int, int, enum reload_type, int));
399 static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int));
400 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
401 static int allocate_reload_reg PROTO((struct insn_chain *, int, int, int));
402 static void choose_reload_regs PROTO((struct insn_chain *, rtx));
403 static void merge_assigned_reloads PROTO((rtx));
404 static void emit_reload_insns PROTO((struct insn_chain *));
405 static void delete_output_reload PROTO((rtx, int, rtx));
406 static void inc_for_reload PROTO((rtx, rtx, int));
407 static int constraint_accepts_reg_p PROTO((char *, rtx));
408 static void reload_cse_regs_1 PROTO((rtx));
409 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
410 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
411 static void reload_cse_invalidate_mem PROTO((rtx));
412 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
413 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
414 static int reload_cse_noop_set_p PROTO((rtx, rtx));
415 static int reload_cse_simplify_set PROTO((rtx, rtx));
416 static int reload_cse_simplify_operands PROTO((rtx));
417 static void reload_cse_check_clobber PROTO((rtx, rtx));
418 static void reload_cse_record_set PROTO((rtx, rtx));
419 static void reload_cse_delete_death_notes PROTO((rtx));
420 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
421 static void reload_combine PROTO((void));
422 static void reload_combine_note_use PROTO((rtx *, rtx));
423 static void reload_combine_note_store PROTO((rtx, rtx));
424 static void reload_cse_move2add PROTO((rtx));
425 static void move2add_note_store PROTO((rtx, rtx));
427 /* Initialize the reload pass once per compilation. */
429 void
430 init_reload ()
432 register int i;
434 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
435 Set spill_indirect_levels to the number of levels such addressing is
436 permitted, zero if it is not permitted at all. */
438 register rtx tem
439 = gen_rtx_MEM (Pmode,
440 gen_rtx_PLUS (Pmode,
441 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
442 GEN_INT (4)));
443 spill_indirect_levels = 0;
445 while (memory_address_p (QImode, tem))
447 spill_indirect_levels++;
448 tem = gen_rtx_MEM (Pmode, tem);
451 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
453 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
454 indirect_symref_ok = memory_address_p (QImode, tem);
456 /* See if reg+reg is a valid (and offsettable) address. */
458 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
460 tem = gen_rtx_PLUS (Pmode,
461 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
462 gen_rtx_REG (Pmode, i));
463 /* This way, we make sure that reg+reg is an offsettable address. */
464 tem = plus_constant (tem, 4);
466 if (memory_address_p (QImode, tem))
468 double_reg_address_ok = 1;
469 break;
473 /* Initialize obstack for our rtl allocation. */
474 gcc_obstack_init (&reload_obstack);
475 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
477 /* Decide which register class should be used when reloading
478 addresses. If we are using SMALL_REGISTER_CLASSES, and any
479 parameters are passed in registers, then we do not want to use
480 those registers when reloading an address. Otherwise, if a
481 function argument needs a reload, we may wind up clobbering
482 another argument to the function which was already computed. If
483 we find a subset class which simply avoids those registers, we
484 use it instead. ??? It would be better to only use the
485 restricted class when we actually are loading function arguments,
486 but that is hard to determine. */
487 reload_address_base_reg_class = BASE_REG_CLASS;
488 reload_address_index_reg_class = INDEX_REG_CLASS;
489 if (SMALL_REGISTER_CLASSES)
491 int regno;
492 HARD_REG_SET base, index;
493 enum reg_class *p;
495 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
496 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
497 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
499 if (FUNCTION_ARG_REGNO_P (regno))
501 CLEAR_HARD_REG_BIT (base, regno);
502 CLEAR_HARD_REG_BIT (index, regno);
506 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
507 baseok);
508 for (p = reg_class_subclasses[BASE_REG_CLASS];
509 *p != LIM_REG_CLASSES;
510 p++)
512 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
513 continue;
514 usebase:
515 reload_address_base_reg_class = *p;
516 break;
518 baseok:;
520 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
521 indexok);
522 for (p = reg_class_subclasses[INDEX_REG_CLASS];
523 *p != LIM_REG_CLASSES;
524 p++)
526 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
527 continue;
528 useindex:
529 reload_address_index_reg_class = *p;
530 break;
532 indexok:;
536 /* List of insn chains that are currently unused. */
537 static struct insn_chain *unused_insn_chains = 0;
539 /* Allocate an empty insn_chain structure. */
540 struct insn_chain *
541 new_insn_chain ()
543 struct insn_chain *c;
545 if (unused_insn_chains == 0)
547 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
548 c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
549 c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
551 else
553 c = unused_insn_chains;
554 unused_insn_chains = c->next;
556 c->is_caller_save_insn = 0;
557 c->need_reload = 0;
558 c->need_elim = 0;
559 return c;
562 /* Small utility function to set all regs in hard reg set TO which are
563 allocated to pseudos in regset FROM. */
564 void
565 compute_use_by_pseudos (to, from)
566 HARD_REG_SET *to;
567 regset from;
569 int regno;
570 EXECUTE_IF_SET_IN_REG_SET
571 (from, FIRST_PSEUDO_REGISTER, regno,
573 int r = reg_renumber[regno];
574 int nregs;
575 if (r < 0)
576 abort ();
577 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
578 while (nregs-- > 0)
579 SET_HARD_REG_BIT (*to, r + nregs);
583 /* Global variables used by reload and its subroutines. */
585 /* Set during calculate_needs if an insn needs register elimination. */
586 static int something_needs_elimination;
588 /* For each class, number of reload regs needed in that class.
589 This is the maximum over all insns of the needs in that class
590 of the individual insn. */
591 static int max_needs[N_REG_CLASSES];
593 /* For each class, size of group of consecutive regs
594 that is needed for the reloads of this class. */
595 static int group_size[N_REG_CLASSES];
597 /* For each class, max number of consecutive groups needed.
598 (Each group contains group_size[CLASS] consecutive registers.) */
599 static int max_groups[N_REG_CLASSES];
601 /* For each class, max number needed of regs that don't belong
602 to any of the groups. */
603 static int max_nongroups[N_REG_CLASSES];
605 /* For each class, the machine mode which requires consecutive
606 groups of regs of that class.
607 If two different modes ever require groups of one class,
608 they must be the same size and equally restrictive for that class,
609 otherwise we can't handle the complexity. */
610 static enum machine_mode group_mode[N_REG_CLASSES];
612 /* Record the insn where each maximum need is first found. */
613 static rtx max_needs_insn[N_REG_CLASSES];
614 static rtx max_groups_insn[N_REG_CLASSES];
615 static rtx max_nongroups_insn[N_REG_CLASSES];
617 /* Nonzero means we couldn't get enough spill regs. */
618 static int failure;
620 /* Main entry point for the reload pass.
622 FIRST is the first insn of the function being compiled.
624 GLOBAL nonzero means we were called from global_alloc
625 and should attempt to reallocate any pseudoregs that we
626 displace from hard regs we will use for reloads.
627 If GLOBAL is zero, we do not have enough information to do that,
628 so any pseudo reg that is spilled must go to the stack.
630 DUMPFILE is the global-reg debugging dump file stream, or 0.
631 If it is nonzero, messages are written to it to describe
632 which registers are seized as reload regs, which pseudo regs
633 are spilled from them, and where the pseudo regs are reallocated to.
635 Return value is nonzero if reload failed
636 and we must not do any more for this function. */
639 reload (first, global, dumpfile)
640 rtx first;
641 int global;
642 FILE *dumpfile;
644 register int i, j;
645 register rtx insn;
646 register struct elim_table *ep;
648 /* The two pointers used to track the true location of the memory used
649 for label offsets. */
650 char *real_known_ptr = NULL_PTR;
651 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
653 int something_changed;
655 /* Make sure even insns with volatile mem refs are recognizable. */
656 init_recog ();
658 failure = 0;
660 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
662 /* Make sure that the last insn in the chain
663 is not something that needs reloading. */
664 emit_note (NULL_PTR, NOTE_INSN_DELETED);
666 /* Enable find_equiv_reg to distinguish insns made by reload. */
667 reload_first_uid = get_max_uid ();
669 for (i = 0; i < N_REG_CLASSES; i++)
670 basic_block_needs[i] = 0;
672 #ifdef SECONDARY_MEMORY_NEEDED
673 /* Initialize the secondary memory table. */
674 clear_secondary_mem ();
675 #endif
677 /* Remember which hard regs appear explicitly
678 before we merge into `regs_ever_live' the ones in which
679 pseudo regs have been allocated. */
680 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
682 /* We don't have a stack slot for any spill reg yet. */
683 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
684 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
686 /* Initialize the save area information for caller-save, in case some
687 are needed. */
688 init_save_areas ();
690 /* Compute which hard registers are now in use
691 as homes for pseudo registers.
692 This is done here rather than (eg) in global_alloc
693 because this point is reached even if not optimizing. */
694 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
695 mark_home_live (i);
697 /* A function that receives a nonlocal goto must save all call-saved
698 registers. */
699 if (current_function_has_nonlocal_label)
700 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
702 if (! call_used_regs[i] && ! fixed_regs[i])
703 regs_ever_live[i] = 1;
706 /* Find all the pseudo registers that didn't get hard regs
707 but do have known equivalent constants or memory slots.
708 These include parameters (known equivalent to parameter slots)
709 and cse'd or loop-moved constant memory addresses.
711 Record constant equivalents in reg_equiv_constant
712 so they will be substituted by find_reloads.
713 Record memory equivalents in reg_equiv_memory_loc so they can
714 be substituted eventually by altering the REG-rtx's. */
716 reg_equiv_constant = (rtx *) xmalloc (max_regno * sizeof (rtx));
717 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
718 reg_equiv_memory_loc = (rtx *) xmalloc (max_regno * sizeof (rtx));
719 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
720 reg_equiv_mem = (rtx *) xmalloc (max_regno * sizeof (rtx));
721 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
722 reg_equiv_init = (rtx *) xmalloc (max_regno * sizeof (rtx));
723 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
724 reg_equiv_address = (rtx *) xmalloc (max_regno * sizeof (rtx));
725 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
726 reg_max_ref_width = (int *) xmalloc (max_regno * sizeof (int));
727 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
729 if (SMALL_REGISTER_CLASSES)
730 CLEAR_HARD_REG_SET (forbidden_regs);
732 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
733 Also find all paradoxical subregs and find largest such for each pseudo.
734 On machines with small register classes, record hard registers that
735 are used for user variables. These can never be used for spills.
736 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
737 caller-saved registers must be marked live. */
739 for (insn = first; insn; insn = NEXT_INSN (insn))
741 rtx set = single_set (insn);
743 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
744 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
745 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
746 if (! call_used_regs[i])
747 regs_ever_live[i] = 1;
749 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
751 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
752 if (note
753 #ifdef LEGITIMATE_PIC_OPERAND_P
754 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
755 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
756 #endif
759 rtx x = XEXP (note, 0);
760 i = REGNO (SET_DEST (set));
761 if (i > LAST_VIRTUAL_REGISTER)
763 if (GET_CODE (x) == MEM)
765 /* If the operand is a PLUS, the MEM may be shared,
766 so make sure we have an unshared copy here. */
767 if (GET_CODE (XEXP (x, 0)) == PLUS)
768 x = copy_rtx (x);
770 reg_equiv_memory_loc[i] = x;
772 else if (CONSTANT_P (x))
774 if (LEGITIMATE_CONSTANT_P (x))
775 reg_equiv_constant[i] = x;
776 else
777 reg_equiv_memory_loc[i]
778 = force_const_mem (GET_MODE (SET_DEST (set)), x);
780 else
781 continue;
783 /* If this register is being made equivalent to a MEM
784 and the MEM is not SET_SRC, the equivalencing insn
785 is one with the MEM as a SET_DEST and it occurs later.
786 So don't mark this insn now. */
787 if (GET_CODE (x) != MEM
788 || rtx_equal_p (SET_SRC (set), x))
789 reg_equiv_init[i] = insn;
794 /* If this insn is setting a MEM from a register equivalent to it,
795 this is the equivalencing insn. */
796 else if (set && GET_CODE (SET_DEST (set)) == MEM
797 && GET_CODE (SET_SRC (set)) == REG
798 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
799 && rtx_equal_p (SET_DEST (set),
800 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
801 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
803 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
804 scan_paradoxical_subregs (PATTERN (insn));
807 init_elim_table ();
809 num_labels = max_label_num () - get_first_label_num ();
811 /* Allocate the tables used to store offset information at labels. */
812 /* We used to use alloca here, but the size of what it would try to
813 allocate would occasionally cause it to exceed the stack limit and
814 cause a core dump. */
815 real_known_ptr = xmalloc (num_labels);
816 real_at_ptr
817 = (int (*)[NUM_ELIMINABLE_REGS])
818 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
820 offsets_known_at = real_known_ptr - get_first_label_num ();
821 offsets_at
822 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
824 /* Alter each pseudo-reg rtx to contain its hard reg number.
825 Assign stack slots to the pseudos that lack hard regs or equivalents.
826 Do not touch virtual registers. */
828 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
829 alter_reg (i, -1);
831 /* If we have some registers we think can be eliminated, scan all insns to
832 see if there is an insn that sets one of these registers to something
833 other than itself plus a constant. If so, the register cannot be
834 eliminated. Doing this scan here eliminates an extra pass through the
835 main reload loop in the most common case where register elimination
836 cannot be done. */
837 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
838 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
839 || GET_CODE (insn) == CALL_INSN)
840 note_stores (PATTERN (insn), mark_not_eliminable);
842 #ifndef REGISTER_CONSTRAINTS
843 /* If all the pseudo regs have hard regs,
844 except for those that are never referenced,
845 we know that no reloads are needed. */
846 /* But that is not true if there are register constraints, since
847 in that case some pseudos might be in the wrong kind of hard reg. */
849 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
850 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
851 break;
853 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
855 free (real_known_ptr);
856 free (real_at_ptr);
857 free (reg_equiv_constant);
858 free (reg_equiv_memory_loc);
859 free (reg_equiv_mem);
860 free (reg_equiv_init);
861 free (reg_equiv_address);
862 free (reg_max_ref_width);
863 return 0;
865 #endif
867 /* Compute the order of preference for hard registers to spill.
868 Store them by decreasing preference in potential_reload_regs. */
870 order_regs_for_reload ();
872 maybe_fix_stack_asms ();
874 /* So far, no hard regs have been spilled. */
875 n_spills = 0;
876 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
877 spill_reg_order[i] = -1;
879 /* Initialize to -1, which means take the first spill register. */
880 last_spill_reg = -1;
882 /* On most machines, we can't use any register explicitly used in the
883 rtl as a spill register. But on some, we have to. Those will have
884 taken care to keep the life of hard regs as short as possible. */
886 if (! SMALL_REGISTER_CLASSES)
887 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
889 spilled_pseudos = ALLOCA_REG_SET ();
891 /* Spill any hard regs that we know we can't eliminate. */
892 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
893 if (! ep->can_eliminate)
894 spill_hard_reg (ep->from, global, dumpfile, 1);
896 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
897 if (frame_pointer_needed)
898 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
899 #endif
901 finish_spills (global, dumpfile);
903 if (global)
904 for (i = 0; i < N_REG_CLASSES; i++)
906 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
907 bzero (basic_block_needs[i], n_basic_blocks);
910 /* From now on, we need to emit any moves without making new pseudos. */
911 reload_in_progress = 1;
913 /* This loop scans the entire function each go-round
914 and repeats until one repetition spills no additional hard regs. */
916 /* This flag is set when a pseudo reg is spilled,
917 to require another pass. Note that getting an additional reload
918 reg does not necessarily imply any pseudo reg was spilled;
919 sometimes we find a reload reg that no pseudo reg was allocated in. */
920 something_changed = 1;
922 /* This flag is set if there are any insns that require register
923 eliminations. */
924 something_needs_elimination = 0;
925 while (something_changed)
927 HOST_WIDE_INT starting_frame_size;
929 something_changed = 0;
930 bzero ((char *) max_needs, sizeof max_needs);
931 bzero ((char *) max_groups, sizeof max_groups);
932 bzero ((char *) max_nongroups, sizeof max_nongroups);
933 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
934 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
935 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
936 bzero ((char *) group_size, sizeof group_size);
937 for (i = 0; i < N_REG_CLASSES; i++)
938 group_mode[i] = VOIDmode;
940 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
941 here because the stack size may be a part of the offset computation
942 for register elimination, and there might have been new stack slots
943 created in the last iteration of this loop. */
944 assign_stack_local (BLKmode, 0, 0);
946 starting_frame_size = get_frame_size ();
948 set_initial_elim_offsets ();
950 /* For each pseudo register that has an equivalent location defined,
951 try to eliminate any eliminable registers (such as the frame pointer)
952 assuming initial offsets for the replacement register, which
953 is the normal case.
955 If the resulting location is directly addressable, substitute
956 the MEM we just got directly for the old REG.
958 If it is not addressable but is a constant or the sum of a hard reg
959 and constant, it is probably not addressable because the constant is
960 out of range, in that case record the address; we will generate
961 hairy code to compute the address in a register each time it is
962 needed. Similarly if it is a hard register, but one that is not
963 valid as an address register.
965 If the location is not addressable, but does not have one of the
966 above forms, assign a stack slot. We have to do this to avoid the
967 potential of producing lots of reloads if, e.g., a location involves
968 a pseudo that didn't get a hard register and has an equivalent memory
969 location that also involves a pseudo that didn't get a hard register.
971 Perhaps at some point we will improve reload_when_needed handling
972 so this problem goes away. But that's very hairy. */
974 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
975 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
977 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
979 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
980 XEXP (x, 0)))
981 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
982 else if (CONSTANT_P (XEXP (x, 0))
983 || (GET_CODE (XEXP (x, 0)) == REG
984 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
985 || (GET_CODE (XEXP (x, 0)) == PLUS
986 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
987 && (REGNO (XEXP (XEXP (x, 0), 0))
988 < FIRST_PSEUDO_REGISTER)
989 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
990 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
991 else
993 /* Make a new stack slot. Then indicate that something
994 changed so we go back and recompute offsets for
995 eliminable registers because the allocation of memory
996 below might change some offset. reg_equiv_{mem,address}
997 will be set up for this pseudo on the next pass around
998 the loop. */
999 reg_equiv_memory_loc[i] = 0;
1000 reg_equiv_init[i] = 0;
1001 alter_reg (i, -1);
1002 something_changed = 1;
1006 /* Insert code to save and restore call-clobbered hard regs
1007 around calls. Tell what mode to use so that we will process
1008 those insns in reload_as_needed if we have to. */
1010 if (caller_save_needed)
1011 setup_save_areas ();
1013 if (starting_frame_size != get_frame_size ())
1014 something_changed = 1;
1016 /* If we allocated another pseudo to the stack, redo elimination
1017 bookkeeping. */
1018 if (something_changed)
1019 continue;
1021 if (caller_save_needed)
1023 save_call_clobbered_regs ();
1024 /* That might have allocated new insn_chain structures. */
1025 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1028 something_changed |= calculate_needs_all_insns (global);
1030 /* If we allocated any new memory locations, make another pass
1031 since it might have changed elimination offsets. */
1032 if (starting_frame_size != get_frame_size ())
1033 something_changed = 1;
1035 if (dumpfile)
1036 dump_needs (dumpfile);
1039 HARD_REG_SET to_spill;
1040 CLEAR_HARD_REG_SET (to_spill);
1041 update_eliminables (&to_spill);
1042 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1043 if (TEST_HARD_REG_BIT (to_spill, i))
1045 spill_hard_reg (i, global, dumpfile, 1);
1046 something_changed = 1;
1050 finish_spills (global, dumpfile);
1052 /* If all needs are met, we win. */
1054 for (i = 0; i < N_REG_CLASSES; i++)
1055 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1056 break;
1057 if (i == N_REG_CLASSES && ! something_changed)
1058 break;
1060 /* Not all needs are met; must spill some hard regs. */
1062 /* Put all registers spilled so far back in potential_reload_regs, but
1063 put them at the front, since we've already spilled most of the
1064 pseudos in them (we might have left some pseudos unspilled if they
1065 were in a block that didn't need any spill registers of a conflicting
1066 class). We used to try to mark off the need for those registers,
1067 but doing so properly is very complex and reallocating them is the
1068 simpler approach. First, "pack" potential_reload_regs by pushing
1069 any nonnegative entries towards the end. That will leave room
1070 for the registers we already spilled.
1072 Also, undo the marking of the spill registers from the last time
1073 around in FORBIDDEN_REGS, since we will probably be allocating
1074 them again below.
1076 ??? It is theoretically possible that we might end up not using one
1077 of our previously-spilled registers in this allocation, even though
1078 they are at the head of the list. It's not clear what to do about
1079 this, but it was no better before, when we marked off the needs met
1080 by the previously-spilled registers. With the current code, globals
1081 can be allocated into these registers, but locals cannot. */
1083 if (n_spills)
1085 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1086 if (potential_reload_regs[i] != -1)
1087 potential_reload_regs[j--] = potential_reload_regs[i];
1089 for (i = 0; i < n_spills; i++)
1091 potential_reload_regs[i] = spill_regs[i];
1092 spill_reg_order[spill_regs[i]] = -1;
1093 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1096 n_spills = 0;
1099 something_changed |= find_reload_regs (global, dumpfile);
1100 if (failure)
1101 goto failed;
1103 finish_spills (global, dumpfile);
1105 if (something_changed)
1106 delete_caller_save_insns ();
1109 /* If global-alloc was run, notify it of any register eliminations we have
1110 done. */
1111 if (global)
1112 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1113 if (ep->can_eliminate)
1114 mark_elimination (ep->from, ep->to);
1116 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1117 If that insn didn't set the register (i.e., it copied the register to
1118 memory), just delete that insn instead of the equivalencing insn plus
1119 anything now dead. If we call delete_dead_insn on that insn, we may
1120 delete the insn that actually sets the register if the register dies
1121 there, and that is incorrect. */
1123 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1124 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1125 && GET_CODE (reg_equiv_init[i]) != NOTE)
1127 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1128 delete_dead_insn (reg_equiv_init[i]);
1129 else
1131 PUT_CODE (reg_equiv_init[i], NOTE);
1132 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1133 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1137 /* Use the reload registers where necessary
1138 by generating move instructions to move the must-be-register
1139 values into or out of the reload registers. */
1141 if (insns_need_reload != 0 || something_needs_elimination)
1142 reload_as_needed (global);
1144 /* If we were able to eliminate the frame pointer, show that it is no
1145 longer live at the start of any basic block. If it is live by
1146 virtue of being in a pseudo, that pseudo will be marked live
1147 and hence the frame pointer will be known to be live via that
1148 pseudo. */
1150 if (! frame_pointer_needed)
1151 for (i = 0; i < n_basic_blocks; i++)
1152 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
1153 HARD_FRAME_POINTER_REGNUM);
1155 /* Come here (with failure set nonzero) if we can't get enough spill regs
1156 and we decide not to abort because of it. */
1157 failed:
1159 reload_in_progress = 0;
1161 /* Now eliminate all pseudo regs by modifying them into
1162 their equivalent memory references.
1163 The REG-rtx's for the pseudos are modified in place,
1164 so all insns that used to refer to them now refer to memory.
1166 For a reg that has a reg_equiv_address, all those insns
1167 were changed by reloading so that no insns refer to it any longer;
1168 but the DECL_RTL of a variable decl may refer to it,
1169 and if so this causes the debugging info to mention the variable. */
1171 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1173 rtx addr = 0;
1174 int in_struct = 0;
1175 int is_readonly = 0;
1177 if (reg_equiv_memory_loc[i])
1179 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
1180 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
1183 if (reg_equiv_mem[i])
1184 addr = XEXP (reg_equiv_mem[i], 0);
1186 if (reg_equiv_address[i])
1187 addr = reg_equiv_address[i];
1189 if (addr)
1191 if (reg_renumber[i] < 0)
1193 rtx reg = regno_reg_rtx[i];
1194 XEXP (reg, 0) = addr;
1195 REG_USERVAR_P (reg) = 0;
1196 RTX_UNCHANGING_P (reg) = is_readonly;
1197 MEM_IN_STRUCT_P (reg) = in_struct;
1198 /* We have no alias information about this newly created
1199 MEM. */
1200 MEM_ALIAS_SET (reg) = 0;
1201 PUT_CODE (reg, MEM);
1203 else if (reg_equiv_mem[i])
1204 XEXP (reg_equiv_mem[i], 0) = addr;
1208 /* Make a pass over all the insns and delete all USEs which we inserted
1209 only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
1210 is defined, also remove death notes for things that are no longer
1211 registers or no longer die in the insn (e.g., an input and output
1212 pseudo being tied). */
1214 for (insn = first; insn; insn = NEXT_INSN (insn))
1215 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1217 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1218 rtx note, next;
1219 #endif
1221 if (GET_CODE (PATTERN (insn)) == USE
1222 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1224 PUT_CODE (insn, NOTE);
1225 NOTE_SOURCE_FILE (insn) = 0;
1226 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1227 continue;
1229 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1230 for (note = REG_NOTES (insn); note; note = next)
1232 next = XEXP (note, 1);
1233 if (REG_NOTE_KIND (note) == REG_DEAD
1234 && (GET_CODE (XEXP (note, 0)) != REG
1235 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1236 remove_note (insn, note);
1238 #endif
1241 /* If we are doing stack checking, give a warning if this function's
1242 frame size is larger than we expect. */
1243 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1245 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1247 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1248 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1249 size += UNITS_PER_WORD;
1251 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1252 warning ("frame size too large for reliable stack checking");
1255 /* Indicate that we no longer have known memory locations or constants. */
1256 if (reg_equiv_constant)
1257 free (reg_equiv_constant);
1258 reg_equiv_constant = 0;
1259 if (reg_equiv_memory_loc)
1260 free (reg_equiv_memory_loc);
1261 reg_equiv_memory_loc = 0;
1263 if (real_known_ptr)
1264 free (real_known_ptr);
1265 if (real_at_ptr)
1266 free (real_at_ptr);
1268 free (reg_equiv_mem);
1269 free (reg_equiv_init);
1270 free (reg_equiv_address);
1271 free (reg_max_ref_width);
1273 FREE_REG_SET (spilled_pseudos);
1275 CLEAR_HARD_REG_SET (used_spill_regs);
1276 for (i = 0; i < n_spills; i++)
1277 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1279 /* Free all the insn_chain structures at once. */
1280 obstack_free (&reload_obstack, reload_startobj);
1281 unused_insn_chains = 0;
1283 return failure;
1286 /* Yet another special case. Unfortunately, reg-stack forces people to
1287 write incorrect clobbers in asm statements. These clobbers must not
1288 cause the register to appear in bad_spill_regs, otherwise we'll call
1289 fatal_insn later. We clear the corresponding regnos in the live
1290 register sets to avoid this.
1291 The whole thing is rather sick, I'm afraid. */
1292 static void
1293 maybe_fix_stack_asms ()
1295 #ifdef STACK_REGS
1296 char *constraints[MAX_RECOG_OPERANDS];
1297 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1298 struct insn_chain *chain;
1300 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1302 int i, noperands;
1303 HARD_REG_SET clobbered, allowed;
1304 rtx pat;
1306 if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
1307 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1308 continue;
1309 pat = PATTERN (chain->insn);
1310 if (GET_CODE (pat) != PARALLEL)
1311 continue;
1313 CLEAR_HARD_REG_SET (clobbered);
1314 CLEAR_HARD_REG_SET (allowed);
1316 /* First, make a mask of all stack regs that are clobbered. */
1317 for (i = 0; i < XVECLEN (pat, 0); i++)
1319 rtx t = XVECEXP (pat, 0, i);
1320 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1321 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1324 /* Get the operand values and constraints out of the insn. */
1325 decode_asm_operands (pat, recog_operand, recog_operand_loc,
1326 constraints, operand_mode);
1328 /* For every operand, see what registers are allowed. */
1329 for (i = 0; i < noperands; i++)
1331 char *p = constraints[i];
1332 /* For every alternative, we compute the class of registers allowed
1333 for reloading in CLS, and merge its contents into the reg set
1334 ALLOWED. */
1335 int cls = (int) NO_REGS;
1337 for (;;)
1339 char c = *p++;
1341 if (c == '\0' || c == ',' || c == '#')
1343 /* End of one alternative - mark the regs in the current
1344 class, and reset the class. */
1345 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1346 cls = NO_REGS;
1347 if (c == '#')
1348 do {
1349 c = *p++;
1350 } while (c != '\0' && c != ',');
1351 if (c == '\0')
1352 break;
1353 continue;
1356 switch (c)
1358 case '=': case '+': case '*': case '%': case '?': case '!':
1359 case '0': case '1': case '2': case '3': case '4': case 'm':
1360 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1361 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1362 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1363 case 'P':
1364 #ifdef EXTRA_CONSTRAINT
1365 case 'Q': case 'R': case 'S': case 'T': case 'U':
1366 #endif
1367 break;
1369 case 'p':
1370 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1371 break;
1373 case 'g':
1374 case 'r':
1375 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1376 break;
1378 default:
1379 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1384 /* Those of the registers which are clobbered, but allowed by the
1385 constraints, must be usable as reload registers. So clear them
1386 out of the life information. */
1387 AND_HARD_REG_SET (allowed, clobbered);
1388 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1389 if (TEST_HARD_REG_BIT (allowed, i))
1391 CLEAR_REGNO_REG_SET (chain->live_before, i);
1392 CLEAR_REGNO_REG_SET (chain->live_after, i);
1396 #endif
1399 /* Walk the insns of the current function, starting with FIRST, and collect
1400 information about the need to do register elimination and the need to
1401 perform reloads. */
1402 static int
1403 calculate_needs_all_insns (global)
1404 int global;
1406 int something_changed = 0;
1407 rtx after_call = 0;
1408 struct insn_chain **pprev_reload = &insns_need_reload;
1409 struct insn_chain *chain;
1411 /* Compute the maximum number of additional registers needed by any instruction.
1412 Collect information separately for each class of regs. */
1414 for (chain = reload_insn_chain; chain; chain = chain->next)
1416 rtx insn = chain->insn;
1418 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1419 might include REG_LABEL), we need to see what effects this
1420 has on the known offsets at labels. */
1422 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1423 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1424 && REG_NOTES (insn) != 0))
1425 set_label_offsets (insn, insn, 0);
1427 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1429 rtx old_body = PATTERN (insn);
1430 int old_code = INSN_CODE (insn);
1431 rtx old_notes = REG_NOTES (insn);
1432 int did_elimination = 0;
1434 /* Nonzero means don't use a reload reg that overlaps
1435 the place where a function value can be returned. */
1436 rtx avoid_return_reg = 0;
1438 /* Set avoid_return_reg if this is an insn
1439 that might use the value of a function call. */
1440 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1442 if (GET_CODE (PATTERN (insn)) == SET)
1443 after_call = SET_DEST (PATTERN (insn));
1444 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1445 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1446 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1447 else
1448 after_call = 0;
1450 else if (SMALL_REGISTER_CLASSES && after_call != 0
1451 && !(GET_CODE (PATTERN (insn)) == SET
1452 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
1453 && GET_CODE (PATTERN (insn)) != USE)
1455 if (reg_referenced_p (after_call, PATTERN (insn)))
1456 avoid_return_reg = after_call;
1457 after_call = 0;
1460 /* If needed, eliminate any eliminable registers. */
1461 if (num_eliminable)
1462 did_elimination = eliminate_regs_in_insn (insn, 0);
1464 /* Analyze the instruction. */
1465 find_reloads (insn, 0, spill_indirect_levels, global,
1466 spill_reg_order);
1468 /* Remember for later shortcuts which insns had any reloads or
1469 register eliminations. */
1470 chain->need_elim = did_elimination;
1471 chain->need_reload = n_reloads > 0;
1473 /* Discard any register replacements done. */
1474 if (did_elimination)
1476 obstack_free (&reload_obstack, reload_firstobj);
1477 PATTERN (insn) = old_body;
1478 INSN_CODE (insn) = old_code;
1479 REG_NOTES (insn) = old_notes;
1480 something_needs_elimination = 1;
1483 if (n_reloads != 0)
1485 *pprev_reload = chain;
1486 pprev_reload = &chain->next_need_reload;
1487 something_changed |= calculate_needs (chain, avoid_return_reg,
1488 global);
1492 *pprev_reload = 0;
1493 return something_changed;
1496 /* To compute the number of reload registers of each class
1497 needed for an insn, we must simulate what choose_reload_regs
1498 can do. We do this by splitting an insn into an "input" and
1499 an "output" part. RELOAD_OTHER reloads are used in both.
1500 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1501 which must be live over the entire input section of reloads,
1502 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1503 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1504 inputs.
1506 The registers needed for output are RELOAD_OTHER and
1507 RELOAD_FOR_OUTPUT, which are live for the entire output
1508 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1509 reloads for each operand.
1511 The total number of registers needed is the maximum of the
1512 inputs and outputs. */
1514 static int
1515 calculate_needs (chain, avoid_return_reg, global)
1516 struct insn_chain *chain;
1517 rtx avoid_return_reg;
1518 int global;
1520 rtx insn = chain->insn;
1521 int something_changed = 0;
1522 int i;
1524 /* Each `struct needs' corresponds to one RELOAD_... type. */
1525 struct {
1526 struct needs other;
1527 struct needs input;
1528 struct needs output;
1529 struct needs insn;
1530 struct needs other_addr;
1531 struct needs op_addr;
1532 struct needs op_addr_reload;
1533 struct needs in_addr[MAX_RECOG_OPERANDS];
1534 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1535 struct needs out_addr[MAX_RECOG_OPERANDS];
1536 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1537 } insn_needs;
1539 bzero ((char *) &insn_needs, sizeof insn_needs);
1541 /* Count each reload once in every class
1542 containing the reload's own class. */
1544 for (i = 0; i < n_reloads; i++)
1546 register enum reg_class *p;
1547 enum reg_class class = reload_reg_class[i];
1548 int size;
1549 enum machine_mode mode;
1550 struct needs *this_needs;
1552 /* Don't count the dummy reloads, for which one of the
1553 regs mentioned in the insn can be used for reloading.
1554 Don't count optional reloads.
1555 Don't count reloads that got combined with others. */
1556 if (reload_reg_rtx[i] != 0
1557 || reload_optional[i] != 0
1558 || (reload_out[i] == 0 && reload_in[i] == 0
1559 && ! reload_secondary_p[i]))
1560 continue;
1562 /* Show that a reload register of this class is needed
1563 in this basic block. We do not use insn_needs and
1564 insn_groups because they are overly conservative for
1565 this purpose. */
1566 if (global && ! basic_block_needs[(int) class][chain->block])
1568 basic_block_needs[(int) class][chain->block] = 1;
1569 something_changed = 1;
1572 mode = reload_inmode[i];
1573 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1574 mode = reload_outmode[i];
1575 size = CLASS_MAX_NREGS (class, mode);
1577 /* Decide which time-of-use to count this reload for. */
1578 switch (reload_when_needed[i])
1580 case RELOAD_OTHER:
1581 this_needs = &insn_needs.other;
1582 break;
1583 case RELOAD_FOR_INPUT:
1584 this_needs = &insn_needs.input;
1585 break;
1586 case RELOAD_FOR_OUTPUT:
1587 this_needs = &insn_needs.output;
1588 break;
1589 case RELOAD_FOR_INSN:
1590 this_needs = &insn_needs.insn;
1591 break;
1592 case RELOAD_FOR_OTHER_ADDRESS:
1593 this_needs = &insn_needs.other_addr;
1594 break;
1595 case RELOAD_FOR_INPUT_ADDRESS:
1596 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1597 break;
1598 case RELOAD_FOR_INPADDR_ADDRESS:
1599 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1600 break;
1601 case RELOAD_FOR_OUTPUT_ADDRESS:
1602 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1603 break;
1604 case RELOAD_FOR_OUTADDR_ADDRESS:
1605 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1606 break;
1607 case RELOAD_FOR_OPERAND_ADDRESS:
1608 this_needs = &insn_needs.op_addr;
1609 break;
1610 case RELOAD_FOR_OPADDR_ADDR:
1611 this_needs = &insn_needs.op_addr_reload;
1612 break;
1615 if (size > 1)
1617 enum machine_mode other_mode, allocate_mode;
1619 /* Count number of groups needed separately from
1620 number of individual regs needed. */
1621 this_needs->groups[(int) class]++;
1622 p = reg_class_superclasses[(int) class];
1623 while (*p != LIM_REG_CLASSES)
1624 this_needs->groups[(int) *p++]++;
1626 /* Record size and mode of a group of this class. */
1627 /* If more than one size group is needed,
1628 make all groups the largest needed size. */
1629 if (group_size[(int) class] < size)
1631 other_mode = group_mode[(int) class];
1632 allocate_mode = mode;
1634 group_size[(int) class] = size;
1635 group_mode[(int) class] = mode;
1637 else
1639 other_mode = mode;
1640 allocate_mode = group_mode[(int) class];
1643 /* Crash if two dissimilar machine modes both need
1644 groups of consecutive regs of the same class. */
1646 if (other_mode != VOIDmode && other_mode != allocate_mode
1647 && ! modes_equiv_for_class_p (allocate_mode,
1648 other_mode, class))
1649 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1650 insn);
1652 else if (size == 1)
1654 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) class] += 1;
1655 p = reg_class_superclasses[(int) class];
1656 while (*p != LIM_REG_CLASSES)
1657 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) *p++] += 1;
1659 else
1660 abort ();
1663 /* All reloads have been counted for this insn;
1664 now merge the various times of use.
1665 This sets insn_needs, etc., to the maximum total number
1666 of registers needed at any point in this insn. */
1668 for (i = 0; i < N_REG_CLASSES; i++)
1670 int j, in_max, out_max;
1672 /* Compute normal and nongroup needs. */
1673 for (j = 0; j <= 1; j++)
1675 int k;
1676 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1678 in_max = MAX (in_max,
1679 (insn_needs.in_addr[k].regs[j][i]
1680 + insn_needs.in_addr_addr[k].regs[j][i]));
1681 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1682 out_max = MAX (out_max,
1683 insn_needs.out_addr_addr[k].regs[j][i]);
1686 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1687 and operand addresses but not things used to reload
1688 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1689 don't conflict with things needed to reload inputs or
1690 outputs. */
1692 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1693 insn_needs.op_addr_reload.regs[j][i]),
1694 in_max);
1696 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1698 insn_needs.input.regs[j][i]
1699 = MAX (insn_needs.input.regs[j][i]
1700 + insn_needs.op_addr.regs[j][i]
1701 + insn_needs.insn.regs[j][i],
1702 in_max + insn_needs.input.regs[j][i]);
1704 insn_needs.output.regs[j][i] += out_max;
1705 insn_needs.other.regs[j][i]
1706 += MAX (MAX (insn_needs.input.regs[j][i],
1707 insn_needs.output.regs[j][i]),
1708 insn_needs.other_addr.regs[j][i]);
1712 /* Now compute group needs. */
1713 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1715 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1716 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1717 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1718 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1721 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1722 insn_needs.op_addr_reload.groups[i]),
1723 in_max);
1724 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1726 insn_needs.input.groups[i]
1727 = MAX (insn_needs.input.groups[i]
1728 + insn_needs.op_addr.groups[i]
1729 + insn_needs.insn.groups[i],
1730 in_max + insn_needs.input.groups[i]);
1732 insn_needs.output.groups[i] += out_max;
1733 insn_needs.other.groups[i]
1734 += MAX (MAX (insn_needs.input.groups[i],
1735 insn_needs.output.groups[i]),
1736 insn_needs.other_addr.groups[i]);
1739 /* If this insn stores the value of a function call,
1740 and that value is in a register that has been spilled,
1741 and if the insn needs a reload in a class
1742 that might use that register as the reload register,
1743 then add an extra need in that class.
1744 This makes sure we have a register available that does
1745 not overlap the return value. */
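/* Illustrative case (the register numbers are hypothetical): if this
insn copies the function value out of hard reg 0, reg 0 has already
been chosen as a spill reg, and some class containing reg 0 is
needed by a reload of this insn, then one extra need is recorded
for that class so a reload register other than reg 0 can still be
chosen. */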
1747 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1749 int regno = REGNO (avoid_return_reg);
1750 int nregs
1751 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1752 int r;
1753 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1755 /* First compute the "basic needs", which counts a
1756 need only in the smallest class in which it
1757 is required. */
1759 bcopy ((char *) insn_needs.other.regs[0],
1760 (char *) basic_needs, sizeof basic_needs);
1761 bcopy ((char *) insn_needs.other.groups,
1762 (char *) basic_groups, sizeof basic_groups);
1764 for (i = 0; i < N_REG_CLASSES; i++)
1766 enum reg_class *p;
1768 if (basic_needs[i] >= 0)
1769 for (p = reg_class_superclasses[i];
1770 *p != LIM_REG_CLASSES; p++)
1771 basic_needs[(int) *p] -= basic_needs[i];
1773 if (basic_groups[i] >= 0)
1774 for (p = reg_class_superclasses[i];
1775 *p != LIM_REG_CLASSES; p++)
1776 basic_groups[(int) *p] -= basic_groups[i];
1779 /* Now count extra regs if there might be a conflict with
1780 the return value register. */
1782 for (r = regno; r < regno + nregs; r++)
1783 if (spill_reg_order[r] >= 0)
1784 for (i = 0; i < N_REG_CLASSES; i++)
1785 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1787 if (basic_needs[i] > 0)
1789 enum reg_class *p;
1791 insn_needs.other.regs[0][i]++;
1792 p = reg_class_superclasses[i];
1793 while (*p != LIM_REG_CLASSES)
1794 insn_needs.other.regs[0][(int) *p++]++;
1796 if (basic_groups[i] > 0)
1798 enum reg_class *p;
1800 insn_needs.other.groups[i]++;
1801 p = reg_class_superclasses[i];
1802 while (*p != LIM_REG_CLASSES)
1803 insn_needs.other.groups[(int) *p++]++;
1808 /* For each class, collect maximum need of any insn. */
1810 for (i = 0; i < N_REG_CLASSES; i++)
1812 if (max_needs[i] < insn_needs.other.regs[0][i])
1814 max_needs[i] = insn_needs.other.regs[0][i];
1815 max_needs_insn[i] = insn;
1817 if (max_groups[i] < insn_needs.other.groups[i])
1819 max_groups[i] = insn_needs.other.groups[i];
1820 max_groups_insn[i] = insn;
1822 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1824 max_nongroups[i] = insn_needs.other.regs[1][i];
1825 max_nongroups_insn[i] = insn;
1829 /* Record the needs for later. */
1830 chain->need = insn_needs.other;
1832 return something_changed;
1835 /* Find a group of exactly 2 registers.
1837 First try to fill out the group by spilling a single register which
1838 would allow completion of the group.
1840 Then try to create a new group from a pair of registers, neither of
1841 which are explicitly used.
1843 Then try to create a group from any pair of registers. */
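/* Illustrative sketch of the first step (hypothetical register
numbers): if reg 3 is already a spill reg usable for CLASS, then
spilling reg 2 or reg 4 would complete the pair (2,3) or (3,4),
provided the existing neighbor is not already counted for another
group or for a nongroup need. */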
1844 static int
1845 find_tworeg_group (global, class, dumpfile)
1846 int global;
1847 int class;
1848 FILE *dumpfile;
1850 int i;
1851 /* First, look for a register that will complete a group. */
1852 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1854 int j, other;
1856 j = potential_reload_regs[i];
1857 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1858 && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1859 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1860 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1861 && HARD_REGNO_MODE_OK (other, group_mode[class])
1862 && ! TEST_HARD_REG_BIT (counted_for_nongroups, other)
1863 /* We don't want one part of another group.
1864 We could get "two groups" that overlap! */
1865 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1866 || (j < FIRST_PSEUDO_REGISTER - 1
1867 && (other = j + 1, spill_reg_order[other] >= 0)
1868 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1869 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1870 && HARD_REGNO_MODE_OK (j, group_mode[class])
1871 && ! TEST_HARD_REG_BIT (counted_for_nongroups, other)
1872 && ! TEST_HARD_REG_BIT (counted_for_groups, other))))
1874 register enum reg_class *p;
1876 /* We have found one that will complete a group,
1877 so count off one group as provided. */
1878 max_groups[class]--;
1879 p = reg_class_superclasses[class];
1880 while (*p != LIM_REG_CLASSES)
1882 if (group_size [(int) *p] <= group_size [class])
1883 max_groups[(int) *p]--;
1884 p++;
1887 /* Indicate both these regs are part of a group. */
1888 SET_HARD_REG_BIT (counted_for_groups, j);
1889 SET_HARD_REG_BIT (counted_for_groups, other);
1890 break;
1893 /* We can't complete a group, so start one. */
1894 /* Look for a pair neither of which is explicitly used. */
1895 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1896 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1898 int j, k;
1899 j = potential_reload_regs[i];
1900 /* Verify that J+1 is a potential reload reg. */
1901 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1902 if (potential_reload_regs[k] == j + 1)
1903 break;
1904 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1905 && k < FIRST_PSEUDO_REGISTER
1906 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1907 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1908 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1909 && HARD_REGNO_MODE_OK (j, group_mode[class])
1910 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1911 j + 1)
1912 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1913 /* Reject J at this stage
1914 if J+1 was explicitly used. */
1915 && ! regs_explicitly_used[j + 1])
1916 break;
1918 /* Now try any group at all
1919 whose registers are not in bad_spill_regs. */
1920 if (i == FIRST_PSEUDO_REGISTER)
1921 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1923 int j, k;
1924 j = potential_reload_regs[i];
1925 /* Verify that J+1 is a potential reload reg. */
1926 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1927 if (potential_reload_regs[k] == j + 1)
1928 break;
1929 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1930 && k < FIRST_PSEUDO_REGISTER
1931 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1932 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1933 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1934 && HARD_REGNO_MODE_OK (j, group_mode[class])
1935 && ! TEST_HARD_REG_BIT (counted_for_nongroups, j + 1)
1936 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1937 break;
1940 /* I should be the index in potential_reload_regs
1941 of the new reload reg we have found. */
1943 if (i < FIRST_PSEUDO_REGISTER)
1944 return new_spill_reg (i, class, max_needs, NULL_PTR,
1945 global, dumpfile);
1947 /* There are no groups left to spill. */
1948 spill_failure (max_groups_insn[class]);
1949 failure = 1;
1950 return 1;
1953 /* Find a group of more than 2 registers.
1954 Look for a sufficient sequence of unspilled registers, and spill them all
1955 at once. */
1956 static int
1957 find_group (global, class, dumpfile)
1958 int global;
1959 int class;
1960 FILE *dumpfile;
1962 int something_changed = 0;
1963 int i;
1965 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1967 int j, k;
1969 j = potential_reload_regs[i];
1970 if (j >= 0
1971 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1972 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1974 /* Check each reg in the sequence. */
1975 for (k = 0; k < group_size[class]; k++)
1976 if (! (spill_reg_order[j + k] < 0
1977 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1978 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1979 break;
1980 /* We got a full sequence, so spill them all. */
1981 if (k == group_size[class])
1983 register enum reg_class *p;
1984 for (k = 0; k < group_size[class]; k++)
1986 int idx;
1987 SET_HARD_REG_BIT (counted_for_groups, j + k);
1988 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1989 if (potential_reload_regs[idx] == j + k)
1990 break;
1991 something_changed |= new_spill_reg (idx, class, max_needs,
1992 NULL_PTR, global,
1993 dumpfile);
1996 /* We have found a complete group of the required size,
1997 so count off one group as provided. */
1998 max_groups[class]--;
1999 p = reg_class_superclasses[class];
2000 while (*p != LIM_REG_CLASSES)
2002 if (group_size [(int) *p]
2003 <= group_size [class])
2004 max_groups[(int) *p]--;
2005 p++;
2007 return something_changed;
2011 /* There are no groups left. */
2012 spill_failure (max_groups_insn[class]);
2013 failure = 1;
2014 return 1;
2017 /* Find more reload regs to satisfy the remaining need.
2018 Do it by ascending class number, since otherwise a reg
2019 might be spilled for a big class and might fail to count
2020 for a smaller class even though it belongs to that class.
2022 Count spilled regs in `spills', and add entries to
2023 `spill_regs' and `spill_reg_order'.
2025 ??? Note there is a problem here.
2026 When there is a need for a group in a high-numbered class,
2027 and also need for non-group regs that come from a lower class,
2028 the non-group regs are chosen first. If there aren't many regs,
2029 they might leave no room for a group.
2031 This was happening on the 386. To fix it, we added the code
2032 that calls possible_group_p, so that the lower class won't
2033 break up the last possible group.
2035 Really fixing the problem would require changes above
2036 in counting the regs already spilled, and in choose_reload_regs.
2037 It might be hard to avoid introducing bugs there. */
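/* For instance (illustrative): a register spilled for a large class
is counted off against that class and its superclasses only, so it
never reduces the recorded need of a smaller class it also belongs
to; handling the smaller classes first sidesteps that. */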
2039 static int
2040 find_reload_regs (global, dumpfile)
2041 int global;
2042 FILE *dumpfile;
2044 int class;
2045 int something_changed = 0;
2047 CLEAR_HARD_REG_SET (counted_for_groups);
2048 CLEAR_HARD_REG_SET (counted_for_nongroups);
2050 for (class = 0; class < N_REG_CLASSES; class++)
2052 /* First get the groups of registers.
2053 If we got single registers first, we might fragment
2054 possible groups. */
2055 while (max_groups[class] > 0)
2057 /* If any single spilled regs happen to form groups,
2058 count them now. Maybe we don't really need
2059 to spill another group. */
2060 count_possible_groups (group_size, group_mode, max_groups, class);
2062 if (max_groups[class] <= 0)
2063 break;
2065 /* Groups of size 2 (the only groups used on most machines)
2066 are treated specially. */
2067 if (group_size[class] == 2)
2068 something_changed |= find_tworeg_group (global, class, dumpfile);
2069 else
2070 something_changed |= find_group (global, class, dumpfile);
2072 if (failure)
2073 return 1;
2076 /* Now similarly satisfy all need for single registers. */
2078 while (max_needs[class] > 0 || max_nongroups[class] > 0)
2080 int i;
2081 /* If we spilled enough regs, but they weren't counted
2082 against the non-group need, see if we can count them now.
2083 If so, we can avoid some actual spilling. */
2084 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
2085 for (i = 0; i < n_spills; i++)
2087 int regno = spill_regs[i];
2088 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2089 && !TEST_HARD_REG_BIT (counted_for_groups, regno)
2090 && !TEST_HARD_REG_BIT (counted_for_nongroups, regno)
2091 && max_nongroups[class] > 0)
2093 register enum reg_class *p;
2095 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2096 max_nongroups[class]--;
2097 p = reg_class_superclasses[class];
2098 while (*p != LIM_REG_CLASSES)
2099 max_nongroups[(int) *p++]--;
2102 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
2103 break;
2105 /* Consider the potential reload regs that aren't
2106 yet in use as reload regs, in order of preference.
2107 Find the most preferred one that's in this class. */
2109 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2111 int regno = potential_reload_regs[i];
2112 if (regno >= 0
2113 && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2114 /* If this reg will not be available for groups,
2115 pick one that does not foreclose possible groups.
2116 This is a kludge, and not very general,
2117 but it should be sufficient to make the 386 work,
2118 and the problem should not occur on machines with
2119 more registers. */
2120 && (max_nongroups[class] == 0
2121 || possible_group_p (regno, max_groups)))
2122 break;
2125 /* If we couldn't get a register, try to get one even if we
2126 might foreclose possible groups. This may cause problems
2127 later, but that's better than aborting now, since it is
2128 possible that we will, in fact, be able to form the needed
2129 group even with this allocation. */
2131 if (i >= FIRST_PSEUDO_REGISTER
2132 && (asm_noperands (max_needs[class] > 0
2133 ? max_needs_insn[class]
2134 : max_nongroups_insn[class])
2135 < 0))
2136 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2137 if (potential_reload_regs[i] >= 0
2138 && TEST_HARD_REG_BIT (reg_class_contents[class],
2139 potential_reload_regs[i]))
2140 break;
2142 /* I should be the index in potential_reload_regs
2143 of the new reload reg we have found. */
2145 if (i >= FIRST_PSEUDO_REGISTER)
2147 /* There are no possible registers left to spill. */
2148 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2149 : max_nongroups_insn[class]);
2150 failure = 1;
2151 return 1;
2153 else
2154 something_changed |= new_spill_reg (i, class, max_needs,
2155 max_nongroups, global,
2156 dumpfile);
2159 return something_changed;
2162 static void
2163 dump_needs (dumpfile)
2164 FILE *dumpfile;
2166 static char *reg_class_names[] = REG_CLASS_NAMES;
2167 int i;
2169 for (i = 0; i < N_REG_CLASSES; i++)
2171 if (max_needs[i] > 0)
2172 fprintf (dumpfile,
2173 ";; Need %d reg%s of class %s (for insn %d).\n",
2174 max_needs[i], max_needs[i] == 1 ? "" : "s",
2175 reg_class_names[i], INSN_UID (max_needs_insn[i]));
2176 if (max_nongroups[i] > 0)
2177 fprintf (dumpfile,
2178 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
2179 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
2180 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
2181 if (max_groups[i] > 0)
2182 fprintf (dumpfile,
2183 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
2184 max_groups[i], max_groups[i] == 1 ? "" : "s",
2185 mode_name[(int) group_mode[i]],
2186 reg_class_names[i], INSN_UID (max_groups_insn[i]));
2190 /* Delete all insns that were inserted by emit_caller_save_insns during
2191 this iteration. */
2192 static void
2193 delete_caller_save_insns ()
2195 struct insn_chain *c = reload_insn_chain;
2197 while (c != 0)
2199 while (c != 0 && c->is_caller_save_insn)
2201 struct insn_chain *next = c->next;
2202 rtx insn = c->insn;
2204 if (insn == basic_block_head[c->block])
2205 basic_block_head[c->block] = NEXT_INSN (insn);
2206 if (insn == basic_block_end[c->block])
2207 basic_block_end[c->block] = PREV_INSN (insn);
2208 if (c == reload_insn_chain)
2209 reload_insn_chain = next;
2211 if (NEXT_INSN (insn) != 0)
2212 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2213 if (PREV_INSN (insn) != 0)
2214 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2216 if (next)
2217 next->prev = c->prev;
2218 if (c->prev)
2219 c->prev->next = next;
2220 c->next = unused_insn_chains;
2221 unused_insn_chains = c;
2222 c = next;
2224 if (c != 0)
2225 c = c->next;
2229 /* Nonzero if, after spilling reg REGNO for non-groups,
2230 it will still be possible to find a group if we still need one. */
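/* Roughly: the answer is yes if no group is still needed at all, or
if some pair of adjacent registers not involving REGNO is still
available in the class that needs the group -- either both still
spillable, or one already spilled but not otherwise reserved. */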
2232 static int
2233 possible_group_p (regno, max_groups)
2234 int regno;
2235 int *max_groups;
2237 int i;
2238 int class = (int) NO_REGS;
2240 for (i = 0; i < (int) N_REG_CLASSES; i++)
2241 if (max_groups[i] > 0)
2243 class = i;
2244 break;
2247 if (class == (int) NO_REGS)
2248 return 1;
2250 /* Consider each pair of consecutive registers. */
2251 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2253 /* Ignore pairs that include reg REGNO. */
2254 if (i == regno || i + 1 == regno)
2255 continue;
2257 /* Ignore pairs that are outside the class that needs the group.
2258 ??? Here we fail to handle the case where two different classes
2259 independently need groups. But this never happens with our
2260 current machine descriptions. */
2261 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2262 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2263 continue;
2265 /* A pair of consecutive regs we can still spill does the trick. */
2266 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2267 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2268 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2269 return 1;
2271 /* A pair of one already spilled and one we can spill does it
2272 provided the one already spilled is not otherwise reserved. */
2273 if (spill_reg_order[i] < 0
2274 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2275 && spill_reg_order[i + 1] >= 0
2276 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2277 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2278 return 1;
2279 if (spill_reg_order[i + 1] < 0
2280 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2281 && spill_reg_order[i] >= 0
2282 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2283 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2284 return 1;
2287 return 0;
2290 /* Count any groups of CLASS that can be formed from the registers recently
2291 spilled. */
2293 static void
2294 count_possible_groups (group_size, group_mode, max_groups, class)
2295 int *group_size;
2296 enum machine_mode *group_mode;
2297 int *max_groups;
2298 int class;
2300 HARD_REG_SET new;
2301 int i, j;
2303 /* Now find all consecutive groups of spilled registers
2304 and mark each group off against the need for such groups.
2305 But don't count them against ordinary need, yet. */
2307 if (group_size[class] == 0)
2308 return;
2310 CLEAR_HARD_REG_SET (new);
2312 /* Make a mask of all the regs that are spill regs in CLASS. */
2313 for (i = 0; i < n_spills; i++)
2314 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2315 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2316 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2317 SET_HARD_REG_BIT (new, spill_regs[i]);
2319 /* Find each consecutive group of them. */
2320 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2321 if (TEST_HARD_REG_BIT (new, i)
2322 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2323 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2325 for (j = 1; j < group_size[class]; j++)
2326 if (! TEST_HARD_REG_BIT (new, i + j))
2327 break;
2329 if (j == group_size[class])
2331 /* We found a group. Mark it off against this class's need for
2332 groups, and against each superclass too. */
2333 register enum reg_class *p;
2335 max_groups[class]--;
2336 p = reg_class_superclasses[class];
2337 while (*p != LIM_REG_CLASSES)
2339 if (group_size [(int) *p] <= group_size [class])
2340 max_groups[(int) *p]--;
2341 p++;
2344 /* Don't count these registers again. */
2345 for (j = 0; j < group_size[class]; j++)
2346 SET_HARD_REG_BIT (counted_for_groups, i + j);
2349 /* Skip to the last reg in this group. When i is incremented above,
2350 it will then point to the first reg of the next possible group. */
2351 i += j - 1;
2355 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2356 another mode that needs to be reloaded for the same register class CLASS.
2357 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2358 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2360 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2361 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2362 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2363 causes unnecessary failures on machines requiring alignment of register
2364 groups when the two modes are different sizes, because the larger mode has
2365 more strict alignment rules than the smaller mode. */
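/* Hypothetical illustration: on a target where the larger
ALLOCATE_MODE is only allowed in even-numbered registers of CLASS
while the smaller OTHER_MODE is allowed anywhere, the two modes
still count as equivalent here, because every register accepting
ALLOCATE_MODE also accepts OTHER_MODE and the reverse direction is
deliberately not checked. */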
2367 static int
2368 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2369 enum machine_mode allocate_mode, other_mode;
2370 enum reg_class class;
2372 register int regno;
2373 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2375 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2376 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2377 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2378 return 0;
2380 return 1;
2383 /* Handle the failure to find a register to spill.
2384 INSN should be one of the insns which needed this particular spill reg. */
2386 static void
2387 spill_failure (insn)
2388 rtx insn;
2390 if (asm_noperands (PATTERN (insn)) >= 0)
2391 error_for_asm (insn, "`asm' needs too many reloads");
2392 else
2393 fatal_insn ("Unable to find a register to spill.", insn);
2396 /* Add a new register to the tables of available spill-registers
2397 (as well as spilling all pseudos allocated to the register).
2398 I is the index of this register in potential_reload_regs.
2399 CLASS is the regclass whose need is being satisfied.
2400 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2401 so that this register can count off against them.
2402 MAX_NONGROUPS is 0 if this register is part of a group.
2403 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2405 static int
2406 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2407 int i;
2408 int class;
2409 int *max_needs;
2410 int *max_nongroups;
2411 int global;
2412 FILE *dumpfile;
2414 register enum reg_class *p;
2415 int val;
2416 int regno = potential_reload_regs[i];
2418 if (i >= FIRST_PSEUDO_REGISTER)
2419 abort (); /* Caller failed to find any register. */
2421 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2423 static char *reg_class_names[] = REG_CLASS_NAMES;
2424 fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
2425 This may be due to a compiler bug or to impossible asm\n\
2426 statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
2429 /* Make reg REGNO an additional reload reg. */
2431 potential_reload_regs[i] = -1;
2432 spill_regs[n_spills] = regno;
2433 spill_reg_order[regno] = n_spills;
2434 if (dumpfile)
2435 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2437 /* Clear off the needs we just satisfied. */
2439 max_needs[class]--;
2440 p = reg_class_superclasses[class];
2441 while (*p != LIM_REG_CLASSES)
2442 max_needs[(int) *p++]--;
2444 if (max_nongroups && max_nongroups[class] > 0)
2446 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2447 max_nongroups[class]--;
2448 p = reg_class_superclasses[class];
2449 while (*p != LIM_REG_CLASSES)
2450 max_nongroups[(int) *p++]--;
2453 /* Spill every pseudo reg that was allocated to this reg
2454 or to something that overlaps this reg. */
2456 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2458 /* If there are some registers still to eliminate and this register
2459 wasn't ever used before, additional stack space may have to be
2460 allocated to store this register. Thus, we may have changed the offset
2461 between the stack and frame pointers, so mark that something has changed.
2462 (If new pseudos were spilled, thus requiring more space, VAL would have
2463 been set non-zero by the call to spill_hard_reg above since additional
2464 reloads may be needed in that case.)
2466 One might think that we need only set VAL to 1 if this is a call-used
2467 register. However, the set of registers that must be saved by the
2468 prologue is not identical to the call-used set. For example, the
2469 register used by the call insn for the return PC is a call-used register,
2470 but must be saved by the prologue. */
2471 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2472 val = 1;
2474 regs_ever_live[spill_regs[n_spills]] = 1;
2475 n_spills++;
2477 return val;
2480 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2481 data that is dead in INSN. */
2483 static void
2484 delete_dead_insn (insn)
2485 rtx insn;
2487 rtx prev = prev_real_insn (insn);
2488 rtx prev_dest;
2490 /* If the previous insn sets a register that dies in our insn, delete it
2491 too. */
2492 if (prev && GET_CODE (PATTERN (prev)) == SET
2493 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2494 && reg_mentioned_p (prev_dest, PATTERN (insn))
2495 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2496 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2497 delete_dead_insn (prev);
2499 PUT_CODE (insn, NOTE);
2500 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2501 NOTE_SOURCE_FILE (insn) = 0;
2504 /* Modify the home of pseudo-reg I.
2505 The new home is present in reg_renumber[I].
2507 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2508 or it may be -1, meaning there is none or it is not relevant.
2509 This is used so that all pseudos spilled from a given hard reg
2510 can share one stack slot. */
2512 static void
2513 alter_reg (i, from_reg)
2514 register int i;
2515 int from_reg;
2517 /* When outputting an inline function, this can happen
2518 for a reg that isn't actually used. */
2519 if (regno_reg_rtx[i] == 0)
2520 return;
2522 /* If the reg got changed to a MEM at rtl-generation time,
2523 ignore it. */
2524 if (GET_CODE (regno_reg_rtx[i]) != REG)
2525 return;
2527 /* Modify the reg-rtx to contain the new hard reg
2528 number or else to contain its pseudo reg number. */
2529 REGNO (regno_reg_rtx[i])
2530 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2532 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2533 allocate a stack slot for it. */
2535 if (reg_renumber[i] < 0
2536 && REG_N_REFS (i) > 0
2537 && reg_equiv_constant[i] == 0
2538 && reg_equiv_memory_loc[i] == 0)
2540 register rtx x;
2541 int inherent_size = PSEUDO_REGNO_BYTES (i);
2542 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2543 int adjust = 0;
2545 /* Each pseudo reg has an inherent size which comes from its own mode,
2546 and a total size which provides room for paradoxical subregs
2547 which refer to the pseudo reg in wider modes.
2549 We can use a slot already allocated if it provides both
2550 enough inherent space and enough total space.
2551 Otherwise, we allocate a new slot, making sure that it has no less
2552 inherent space, and no less total space, than the previous slot. */
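/* Illustrative sizes: a pseudo whose own mode occupies 2 bytes but
which is also referenced through a 4-byte paradoxical SUBREG has an
inherent size of 2 and a total size of 4, so a reused slot must
offer at least 2 bytes of inherent space and 4 bytes overall. */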
2553 if (from_reg == -1)
2555 /* No known place to spill from => no slot to reuse. */
2556 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2557 inherent_size == total_size ? 0 : -1);
2558 if (BYTES_BIG_ENDIAN)
2559 /* Cancel the big-endian correction done in assign_stack_local.
2560 Get the address of the beginning of the slot.
2561 This is so we can do a big-endian correction unconditionally
2562 below. */
2563 adjust = inherent_size - total_size;
2565 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2567 /* Reuse a stack slot if possible. */
2568 else if (spill_stack_slot[from_reg] != 0
2569 && spill_stack_slot_width[from_reg] >= total_size
2570 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2571 >= inherent_size))
2572 x = spill_stack_slot[from_reg];
2573 /* Allocate a bigger slot. */
2574 else
2576 /* Compute maximum size needed, both for inherent size
2577 and for total size. */
2578 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2579 rtx stack_slot;
2580 if (spill_stack_slot[from_reg])
2582 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2583 > inherent_size)
2584 mode = GET_MODE (spill_stack_slot[from_reg]);
2585 if (spill_stack_slot_width[from_reg] > total_size)
2586 total_size = spill_stack_slot_width[from_reg];
2588 /* Make a slot with that size. */
2589 x = assign_stack_local (mode, total_size,
2590 inherent_size == total_size ? 0 : -1);
2591 stack_slot = x;
2592 if (BYTES_BIG_ENDIAN)
2594 /* Cancel the big-endian correction done in assign_stack_local.
2595 Get the address of the beginning of the slot.
2596 This is so we can do a big-endian correction unconditionally
2597 below. */
2598 adjust = GET_MODE_SIZE (mode) - total_size;
2599 if (adjust)
2600 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2601 * BITS_PER_UNIT,
2602 MODE_INT, 1),
2603 plus_constant (XEXP (x, 0), adjust));
2605 spill_stack_slot[from_reg] = stack_slot;
2606 spill_stack_slot_width[from_reg] = total_size;
2609 /* On a big endian machine, the "address" of the slot
2610 is the address of the low part that fits its inherent mode. */
2611 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2612 adjust += (total_size - inherent_size);
2614 /* If we have any adjustment to make, or if the stack slot is the
2615 wrong mode, make a new stack slot. */
2616 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2618 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
2619 plus_constant (XEXP (x, 0), adjust));
2621 /* If this was shared among registers, must ensure we never
2622 set it readonly since that can cause scheduling
2623 problems. Note we would only need to set it in this
2624 adjustment case in any event, since the code above doesn't set it. */
2626 if (from_reg == -1)
2627 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2630 /* Save the stack slot for later. */
2631 reg_equiv_memory_loc[i] = x;
2635 /* Mark the slots in regs_ever_live for the hard regs
2636 used by pseudo-reg number REGNO. */
2638 void
2639 mark_home_live (regno)
2640 int regno;
2642 register int i, lim;
2643 i = reg_renumber[regno];
2644 if (i < 0)
2645 return;
2646 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2647 while (i < lim)
2648 regs_ever_live[i++] = 1;
2651 /* This function handles the tracking of elimination offsets around branches.
2653 X is a piece of RTL being scanned.
2655 INSN is the insn that it came from, if any.
2657 INITIAL_P is non-zero if we are to set the offset to be the initial
2658 offset and zero if we are setting the offset of the label to be the
2659 current offset. */
2661 static void
2662 set_label_offsets (x, insn, initial_p)
2663 rtx x;
2664 rtx insn;
2665 int initial_p;
2667 enum rtx_code code = GET_CODE (x);
2668 rtx tem;
2669 unsigned int i;
2670 struct elim_table *p;
2672 switch (code)
2674 case LABEL_REF:
2675 if (LABEL_REF_NONLOCAL_P (x))
2676 return;
2678 x = XEXP (x, 0);
2680 /* ... fall through ... */
2682 case CODE_LABEL:
2683 /* If we know nothing about this label, set the desired offsets. Note
2684 that this sets the offset at a label to be the offset before a label
2685 if we don't know anything about the label. This is not correct for
2686 the label after a BARRIER, but is the best guess we can make. If
2687 we guessed wrong, we will suppress an elimination that might have
2688 been possible had we been able to guess correctly. */
2690 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2692 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2693 offsets_at[CODE_LABEL_NUMBER (x)][i]
2694 = (initial_p ? reg_eliminate[i].initial_offset
2695 : reg_eliminate[i].offset);
2696 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2699 /* Otherwise, if this is the definition of a label and it is
2700 preceded by a BARRIER, set our offsets to the known offset of
2701 that label. */
2703 else if (x == insn
2704 && (tem = prev_nonnote_insn (insn)) != 0
2705 && GET_CODE (tem) == BARRIER)
2707 num_not_at_initial_offset = 0;
2708 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2710 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2711 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2712 if (reg_eliminate[i].can_eliminate
2713 && (reg_eliminate[i].offset
2714 != reg_eliminate[i].initial_offset))
2715 num_not_at_initial_offset++;
2719 else
2720 /* If neither of the above cases is true, compare each offset
2721 with those previously recorded and suppress any eliminations
2722 where the offsets disagree. */
2724 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2725 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2726 != (initial_p ? reg_eliminate[i].initial_offset
2727 : reg_eliminate[i].offset))
2728 reg_eliminate[i].can_eliminate = 0;
2730 return;
2732 case JUMP_INSN:
2733 set_label_offsets (PATTERN (insn), insn, initial_p);
2735 /* ... fall through ... */
2737 case INSN:
2738 case CALL_INSN:
2739 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2740 and hence must have all eliminations at their initial offsets. */
2741 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2742 if (REG_NOTE_KIND (tem) == REG_LABEL)
2743 set_label_offsets (XEXP (tem, 0), insn, 1);
2744 return;
2746 case ADDR_VEC:
2747 case ADDR_DIFF_VEC:
2748 /* Each of the labels in the address vector must be at their initial
2749 offsets. We want the first field for ADDR_VEC and the second
2750 field for ADDR_DIFF_VEC. */
2752 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2753 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2754 insn, initial_p);
2755 return;
2757 case SET:
2758 /* We only care about setting PC. If the source is not RETURN,
2759 IF_THEN_ELSE, or a label, disable any eliminations not at
2760 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2761 isn't one of those possibilities. For branches to a label,
2762 call ourselves recursively.
2764 Note that this can disable elimination unnecessarily when we have
2765 a non-local goto since it will look like a non-constant jump to
2766 someplace in the current function. This isn't a significant
2767 problem since such jumps will normally be when all elimination
2768 pairs are back to their initial offsets. */
2770 if (SET_DEST (x) != pc_rtx)
2771 return;
2773 switch (GET_CODE (SET_SRC (x)))
2775 case PC:
2776 case RETURN:
2777 return;
2779 case LABEL_REF:
2780 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2781 return;
2783 case IF_THEN_ELSE:
2784 tem = XEXP (SET_SRC (x), 1);
2785 if (GET_CODE (tem) == LABEL_REF)
2786 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2787 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2788 break;
2790 tem = XEXP (SET_SRC (x), 2);
2791 if (GET_CODE (tem) == LABEL_REF)
2792 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2793 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2794 break;
2795 return;
2797 default:
2798 break;
2801 /* If we reach here, all eliminations must be at their initial
2802 offset because we are doing a jump to a variable address. */
2803 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2804 if (p->offset != p->initial_offset)
2805 p->can_eliminate = 0;
2806 break;
2808 default:
2809 break;
2813 /* Used for communication between the next two functions to properly share
2814 the vector for an ASM_OPERANDS. */
2816 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2818 /* Scan X and replace any eliminable registers (such as fp) with a
2819 replacement (such as sp), plus an offset.
2821 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2822 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2823 MEM, we are allowed to replace a sum of a register and the constant zero
2824 with the register, which we cannot do outside a MEM. In addition, we need
2825 to record the fact that a register is referenced outside a MEM.
2827 If INSN is an insn, it is the insn containing X. If we replace a REG
2828 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2829 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2830 the REG is being modified.
2832 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2833 That's used when we eliminate in expressions stored in notes.
2834 This means, do not set ref_outside_mem even if the reference
2835 is outside of MEMs.
2837 If we see a modification to a register we know about, take the
2838 appropriate action (see case SET, below).
2840 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2841 replacements done assuming all offsets are at their initial values. If
2842 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2843 encounter, return the actual location so that find_reloads will do
2844 the proper thing. */
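/* A concrete (illustrative) flavor of the rewriting done here: with an
active frame-pointer-to-stack-pointer elimination whose current
offset is OFF, a bare reference to the frame pointer becomes
(plus (reg sp) (const_int OFF)), and within a MEM the address
(plus (reg fp) (const_int 4)) becomes
(plus (reg sp) (const_int OFF+4)). */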
2846 rtx
2847 eliminate_regs (x, mem_mode, insn)
2848 rtx x;
2849 enum machine_mode mem_mode;
2850 rtx insn;
2852 enum rtx_code code = GET_CODE (x);
2853 struct elim_table *ep;
2854 int regno;
2855 rtx new;
2856 int i, j;
2857 char *fmt;
2858 int copied = 0;
2860 switch (code)
2862 case CONST_INT:
2863 case CONST_DOUBLE:
2864 case CONST:
2865 case SYMBOL_REF:
2866 case CODE_LABEL:
2867 case PC:
2868 case CC0:
2869 case ASM_INPUT:
2870 case ADDR_VEC:
2871 case ADDR_DIFF_VEC:
2872 case RETURN:
2873 return x;
2875 case ADDRESSOF:
2876 /* This is only for the benefit of the debugging backends, which call
2877 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2878 removed after CSE. */
2879 new = eliminate_regs (XEXP (x, 0), 0, insn);
2880 if (GET_CODE (new) == MEM)
2881 return XEXP (new, 0);
2882 return x;
2884 case REG:
2885 regno = REGNO (x);
2887 /* First handle the case where we encounter a bare register that
2888 is eliminable. Replace it with a PLUS. */
2889 if (regno < FIRST_PSEUDO_REGISTER)
2891 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2892 ep++)
2893 if (ep->from_rtx == x && ep->can_eliminate)
2895 if (! mem_mode
2896 /* Refs inside notes don't count for this purpose. */
2897 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2898 || GET_CODE (insn) == INSN_LIST)))
2899 ep->ref_outside_mem = 1;
2900 return plus_constant (ep->to_rtx, ep->previous_offset);
2904 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2905 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2907 /* In this case, find_reloads would attempt to either use an
2908 incorrect address (if something is not at its initial offset)
2909 or substitute a replaced address into an insn (which loses
2910 if the offset is changed by some later action). So we simply
2911 return the replaced stack slot (assuming it is changed by
2912 elimination) and ignore the fact that this is actually a
2913 reference to the pseudo. Ensure we make a copy of the
2914 address in case it is shared. */
2915 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, insn);
2916 if (new != reg_equiv_memory_loc[regno])
2918 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2919 && GET_CODE (insn) != INSN_LIST)
2920 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn))
2921 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
2922 return copy_rtx (new);
2925 return x;
2927 case PLUS:
2928 /* If this is the sum of an eliminable register and a constant, rework
2929 the sum. */
2930 if (GET_CODE (XEXP (x, 0)) == REG
2931 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2932 && CONSTANT_P (XEXP (x, 1)))
2934 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2935 ep++)
2936 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2938 if (! mem_mode
2939 /* Refs inside notes don't count for this purpose. */
2940 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2941 || GET_CODE (insn) == INSN_LIST)))
2942 ep->ref_outside_mem = 1;
2944 /* The only time we want to replace a PLUS with a REG (this
2945 occurs when the constant operand of the PLUS is the negative
2946 of the offset) is when we are inside a MEM. We won't want
2947 to do so at other times because that would change the
2948 structure of the insn in a way that reload can't handle.
2949 We special-case the commonest situation in
2950 eliminate_regs_in_insn, so just replace a PLUS with a
2951 PLUS here, unless inside a MEM. */
2952 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2953 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2954 return ep->to_rtx;
2955 else
2956 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2957 plus_constant (XEXP (x, 1),
2958 ep->previous_offset));
2961 /* If the register is not eliminable, we are done since the other
2962 operand is a constant. */
2963 return x;
2966 /* If this is part of an address, we want to bring any constant to the
2967 outermost PLUS. We will do this by doing register replacement in
2968 our operands and seeing if a constant shows up in one of them.
2970 We assume here this is part of an address (or a "load address" insn)
2971 since an eliminable register is not likely to appear in any other
2972 context.
2974 If we have (plus (eliminable) (reg)), we want to produce
2975 (plus (plus (replacement) (reg)) (const)). If this was part of a
2976 normal add insn, (plus (replacement) (reg)) will be pushed as a
2977 reload. This is the desired action. */
2980 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2981 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2983 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2985 /* If one side is a PLUS and the other side is a pseudo that
2986 didn't get a hard register but has a reg_equiv_constant,
2987 we must replace the constant here since it may no longer
2988 be in the position of any operand. */
2989 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2990 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2991 && reg_renumber[REGNO (new1)] < 0
2992 && reg_equiv_constant != 0
2993 && reg_equiv_constant[REGNO (new1)] != 0)
2994 new1 = reg_equiv_constant[REGNO (new1)];
2995 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2996 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2997 && reg_renumber[REGNO (new0)] < 0
2998 && reg_equiv_constant[REGNO (new0)] != 0)
2999 new0 = reg_equiv_constant[REGNO (new0)];
3001 new = form_sum (new0, new1);
3003 /* As above, if we are not inside a MEM we do not want to
3004 turn a PLUS into something else. We might try to do so here
3005 for an addition of 0 if we aren't optimizing. */
3006 if (! mem_mode && GET_CODE (new) != PLUS)
3007 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
3008 else
3009 return new;
3012 return x;
3014 case MULT:
3015 /* If this is the product of an eliminable register and a
3016 constant, apply the distributive law and move the constant out
3017 so that we have (plus (mult ..) ..). This is needed in order
3018 to keep load-address insns valid. This case is pathological.
3019 We ignore the possibility of overflow here. */
3020 if (GET_CODE (XEXP (x, 0)) == REG
3021 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3022 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3023 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3024 ep++)
3025 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
3027 if (! mem_mode
3028 /* Refs inside notes don't count for this purpose. */
3029 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
3030 || GET_CODE (insn) == INSN_LIST)))
3031 ep->ref_outside_mem = 1;
3033 return
3034 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
3035 ep->previous_offset * INTVAL (XEXP (x, 1)));
3038 /* ... fall through ... */
3040 case CALL:
3041 case COMPARE:
3042 case MINUS:
3043 case DIV: case UDIV:
3044 case MOD: case UMOD:
3045 case AND: case IOR: case XOR:
3046 case ROTATERT: case ROTATE:
3047 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3048 case NE: case EQ:
3049 case GE: case GT: case GEU: case GTU:
3050 case LE: case LT: case LEU: case LTU:
3052 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3053 rtx new1
3054 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
3056 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3057 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
3059 return x;
3061 case EXPR_LIST:
3062 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3063 if (XEXP (x, 0))
3065 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3066 if (new != XEXP (x, 0))
3067 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
3070 /* ... fall through ... */
3072 case INSN_LIST:
3073 /* Now do eliminations in the rest of the chain. If this was
3074 an EXPR_LIST, this might result in allocating more memory than is
3075 strictly needed, but it simplifies the code. */
3076 if (XEXP (x, 1))
3078 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
3079 if (new != XEXP (x, 1))
3080 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3082 return x;
3084 case PRE_INC:
3085 case POST_INC:
3086 case PRE_DEC:
3087 case POST_DEC:
3088 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3089 if (ep->to_rtx == XEXP (x, 0))
3091 int size = GET_MODE_SIZE (mem_mode);
3093 /* If more bytes than MEM_MODE are pushed, account for them. */
3094 #ifdef PUSH_ROUNDING
3095 if (ep->to_rtx == stack_pointer_rtx)
3096 size = PUSH_ROUNDING (size);
3097 #endif
3098 if (code == PRE_DEC || code == POST_DEC)
3099 ep->offset += size;
3100 else
3101 ep->offset -= size;
3104 /* Fall through to generic unary operation case. */
3105 case STRICT_LOW_PART:
3106 case NEG: case NOT:
3107 case SIGN_EXTEND: case ZERO_EXTEND:
3108 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3109 case FLOAT: case FIX:
3110 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3111 case ABS:
3112 case SQRT:
3113 case FFS:
3114 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3115 if (new != XEXP (x, 0))
3116 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3117 return x;
3119 case SUBREG:
3120 /* Similar to above processing, but preserve SUBREG_WORD.
3121 Convert (subreg (mem)) to (mem) if not paradoxical.
3122 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3123 pseudo didn't get a hard reg, we must replace this with the
3124 eliminated version of the memory location because push_reloads
3125 may do the replacement in certain circumstances. */
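/* Illustrative: for a non-paradoxical (subreg:m1 (reg:m2 PSEUDO) W)
where PSEUDO lives in memory, the code below substitutes the
(possibly eliminated) memory location and, when the sizes allow it,
narrows the MEM to mode m1 at the byte offset implied by word W. */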
3126 if (GET_CODE (SUBREG_REG (x)) == REG
3127 && (GET_MODE_SIZE (GET_MODE (x))
3128 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3129 && reg_equiv_memory_loc != 0
3130 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3132 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3133 mem_mode, insn);
3135 /* If we didn't change anything, we must retain the pseudo. */
3136 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3137 new = SUBREG_REG (x);
3138 else
3140 /* In this case, we must show that the pseudo is used in this
3141 insn so that delete_output_reload will do the right thing. */
3142 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3143 && GET_CODE (insn) != INSN_LIST)
3144 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
3145 SUBREG_REG (x)),
3146 insn))
3147 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
3149 /* Ensure NEW isn't shared in case we have to reload it. */
3150 new = copy_rtx (new);
3153 else
3154 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
3156 if (new != XEXP (x, 0))
3158 int x_size = GET_MODE_SIZE (GET_MODE (x));
3159 int new_size = GET_MODE_SIZE (GET_MODE (new));
3161 if (GET_CODE (new) == MEM
3162 && ((x_size < new_size
3163 #ifdef WORD_REGISTER_OPERATIONS
3164 /* On these machines, combine can create rtl of the form
3165 (set (subreg:m1 (reg:m2 R) 0) ...)
3166 where m1 < m2, and expects something interesting to
3167 happen to the entire word. Moreover, it will use the
3168 (reg:m2 R) later, expecting all bits to be preserved.
3169 So if the number of words is the same, preserve the
3170 subreg so that push_reloads can see it. */
3171 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
3172 #endif
3174 || (x_size == new_size))
3177 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3178 enum machine_mode mode = GET_MODE (x);
3180 if (BYTES_BIG_ENDIAN)
3181 offset += (MIN (UNITS_PER_WORD,
3182 GET_MODE_SIZE (GET_MODE (new)))
3183 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3185 PUT_MODE (new, mode);
3186 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3187 return new;
3189 else
3190 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
3193 return x;
3195 case USE:
3196 /* If using a register that is the source of an elimination we still
3197 think can be performed, note it cannot be performed since we don't
3198 know how this register is used. */
3199 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3200 if (ep->from_rtx == XEXP (x, 0))
3201 ep->can_eliminate = 0;
3203 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3204 if (new != XEXP (x, 0))
3205 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3206 return x;
3208 case CLOBBER:
3209 /* If clobbering a register that is the replacement register for an
3210 elimination we still think can be performed, note that it cannot
3211 be performed. Otherwise, we need not be concerned about it. */
3212 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3213 if (ep->to_rtx == XEXP (x, 0))
3214 ep->can_eliminate = 0;
3216 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3217 if (new != XEXP (x, 0))
3218 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3219 return x;
3221 case ASM_OPERANDS:
3223 rtx *temp_vec;
3224 /* Properly handle sharing input and constraint vectors. */
3225 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3227 /* When we come to a new vector not seen before,
3228 scan all its elements; keep the old vector if none
3229 of them changes; otherwise, make a copy. */
3230 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3231 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3232 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3233 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3234 mem_mode, insn);
3236 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3237 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3238 break;
3240 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3241 new_asm_operands_vec = old_asm_operands_vec;
3242 else
3243 new_asm_operands_vec
3244 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3247 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3248 if (new_asm_operands_vec == old_asm_operands_vec)
3249 return x;
3251 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3252 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3253 ASM_OPERANDS_OUTPUT_IDX (x),
3254 new_asm_operands_vec,
3255 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3256 ASM_OPERANDS_SOURCE_FILE (x),
3257 ASM_OPERANDS_SOURCE_LINE (x));
3258 new->volatil = x->volatil;
3259 return new;
3262 case SET:
3263 /* Check for setting a register that we know about. */
3264 if (GET_CODE (SET_DEST (x)) == REG)
3266 /* See if this is setting the replacement register for an
3267 elimination.
3269 If DEST is the hard frame pointer, we do nothing because we
3270 assume that all assignments to the frame pointer are for
3271 non-local gotos and are being done at a time when they are valid
3272 and do not disturb anything else. Some machines want to
3273 eliminate a fake argument pointer (or even a fake frame pointer)
3274 with either the real frame or the stack pointer. Assignments to
3275 the hard frame pointer must not prevent this elimination. */
3277 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3278 ep++)
3279 if (ep->to_rtx == SET_DEST (x)
3280 && SET_DEST (x) != hard_frame_pointer_rtx)
3282 /* If it is being incremented, adjust the offset. Otherwise,
3283 this elimination can't be done. */
3284 rtx src = SET_SRC (x);
3286 if (GET_CODE (src) == PLUS
3287 && XEXP (src, 0) == SET_DEST (x)
3288 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3289 ep->offset -= INTVAL (XEXP (src, 1));
3290 else
3291 ep->can_eliminate = 0;
3294 /* Now check to see whether we are assigning to a register that can be
3295 eliminated. If so, it must be as part of a PARALLEL, since we
3296 will not have been called if this is a single SET. So indicate
3297 that we can no longer eliminate this reg. */
3298 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3299 ep++)
3300 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3301 ep->can_eliminate = 0;
3304 /* Now avoid the loop below in this common case. */
3306 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3307 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3309 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3310 write a CLOBBER insn. */
3311 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3312 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3313 && GET_CODE (insn) != INSN_LIST)
3314 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
3316 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3317 return gen_rtx_SET (VOIDmode, new0, new1);
3320 return x;
3322 case MEM:
3323 /* This is only for the benefit of the debugging backends, which call
3324 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3325 removed after CSE. */
3326 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3327 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
3329 /* Our only special processing is to pass the mode of the MEM to our
3330 recursive call and copy the flags. While we are here, handle this
3331 case more efficiently. */
3332 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3333 if (new != XEXP (x, 0))
3335 new = gen_rtx_MEM (GET_MODE (x), new);
3336 new->volatil = x->volatil;
3337 new->unchanging = x->unchanging;
3338 new->in_struct = x->in_struct;
3339 return new;
3341 else
3342 return x;
3344 default:
3345 break;
3348 /* Process each of our operands recursively. If any have changed, make a
3349 copy of the rtx. */
3350 fmt = GET_RTX_FORMAT (code);
3351 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3353 if (*fmt == 'e')
3355 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3356 if (new != XEXP (x, i) && ! copied)
3358 rtx new_x = rtx_alloc (code);
3359 bcopy ((char *) x, (char *) new_x,
3360 (sizeof (*new_x) - sizeof (new_x->fld)
3361 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3362 x = new_x;
3363 copied = 1;
3365 XEXP (x, i) = new;
3367 else if (*fmt == 'E')
3369 int copied_vec = 0;
3370 for (j = 0; j < XVECLEN (x, i); j++)
3372 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3373 if (new != XVECEXP (x, i, j) && ! copied_vec)
3375 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3376 XVEC (x, i)->elem);
3377 if (! copied)
3379 rtx new_x = rtx_alloc (code);
3380 bcopy ((char *) x, (char *) new_x,
3381 (sizeof (*new_x) - sizeof (new_x->fld)
3382 + (sizeof (new_x->fld[0])
3383 * GET_RTX_LENGTH (code))));
3384 x = new_x;
3385 copied = 1;
3387 XVEC (x, i) = new_v;
3388 copied_vec = 1;
3390 XVECEXP (x, i, j) = new;
3395 return x;
3398 /* Scan INSN and eliminate all eliminable registers in it.
3400 If REPLACE is nonzero, do the replacement destructively. Also
3401 delete the insn as dead if it is setting an eliminable register.
3403 If REPLACE is zero, do all our allocations in reload_obstack.
3405 If no eliminations were done and this insn doesn't require any elimination
3406 processing (these are not identical conditions: it might be updating sp,
3407 but not referencing fp; this needs to be seen during reload_as_needed so
3408 that the offset between fp and sp can be taken into consideration), zero
3409 is returned. Otherwise, 1 is returned. */
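/* Illustrative example (assumes a target that eliminates fp into sp): if the
   current fp-to-sp displacement recorded in the elim_table is 16, a reference
   to (plus (reg fp) (const_int 8)) inside INSN is rewritten by eliminate_regs
   into (plus (reg sp) (const_int 24)).  The special cases handled directly
   below exist so that such rewritten insns can also be re-recognized (for
   instance as a plain move) when that is possible.  */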
3411 static int
3412 eliminate_regs_in_insn (insn, replace)
3413 rtx insn;
3414 int replace;
3416 rtx old_body = PATTERN (insn);
3417 rtx old_set = single_set (insn);
3418 rtx new_body;
3419 int val = 0;
3420 struct elim_table *ep;
3422 if (! replace)
3423 push_obstacks (&reload_obstack, &reload_obstack);
3425 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3426 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3428 /* Check for setting an eliminable register. */
3429 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3430 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3432 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3433 /* If this is setting the frame pointer register to the
3434 hardware frame pointer register and this is an elimination
3435 that will be done (tested above), this insn is really
3436 adjusting the frame pointer downward to compensate for
3437 the adjustment done before a nonlocal goto. */
3438 if (ep->from == FRAME_POINTER_REGNUM
3439 && ep->to == HARD_FRAME_POINTER_REGNUM)
3441 rtx src = SET_SRC (old_set);
3442 int offset, ok = 0;
3443 rtx prev_insn, prev_set;
3445 if (src == ep->to_rtx)
3446 offset = 0, ok = 1;
3447 else if (GET_CODE (src) == PLUS
3448 && GET_CODE (XEXP (src, 0)) == CONST_INT
3449 && XEXP (src, 1) == ep->to_rtx)
3450 offset = INTVAL (XEXP (src, 0)), ok = 1;
3451 else if (GET_CODE (src) == PLUS
3452 && GET_CODE (XEXP (src, 1)) == CONST_INT
3453 && XEXP (src, 0) == ep->to_rtx)
3454 offset = INTVAL (XEXP (src, 1)), ok = 1;
3455 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3456 && (prev_set = single_set (prev_insn)) != 0
3457 && rtx_equal_p (SET_DEST (prev_set), src))
3459 src = SET_SRC (prev_set);
3460 if (src == ep->to_rtx)
3461 offset = 0, ok = 1;
3462 else if (GET_CODE (src) == PLUS
3463 && GET_CODE (XEXP (src, 0)) == CONST_INT
3464 && XEXP (src, 1) == ep->to_rtx)
3465 offset = INTVAL (XEXP (src, 0)), ok = 1;
3466 else if (GET_CODE (src) == PLUS
3467 && GET_CODE (XEXP (src, 1)) == CONST_INT
3468 && XEXP (src, 0) == ep->to_rtx)
3469 offset = INTVAL (XEXP (src, 1)), ok = 1;
3472 if (ok)
3474 if (replace)
3476 rtx src
3477 = plus_constant (ep->to_rtx, offset - ep->offset);
3479 /* First see if this insn remains valid when we
3480 make the change. If not, keep the INSN_CODE
3481 the same and let reload fix it up. */
3482 validate_change (insn, &SET_SRC (old_set), src, 1);
3483 validate_change (insn, &SET_DEST (old_set),
3484 ep->to_rtx, 1);
3485 if (! apply_change_group ())
3487 SET_SRC (old_set) = src;
3488 SET_DEST (old_set) = ep->to_rtx;
3492 val = 1;
3493 goto done;
3496 #endif
3498 /* In this case this insn isn't serving a useful purpose. We
3499 will delete it in reload_as_needed once we know that this
3500 elimination is, in fact, being done.
3502 If REPLACE isn't set, we can't delete this insn, but needn't
3503 process it since it won't be used unless something changes. */
3504 if (replace)
3505 delete_dead_insn (insn);
3506 val = 1;
3507 goto done;
3510 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3511 in the insn is the negative of the offset in FROM. Substitute
3512 (set (reg) (reg to)) for the insn and change its code.
3514 We have to do this here, rather than in eliminate_regs, so that we can
3515 change the insn code. */
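/* For instance (illustrative): with an fp-to-sp elimination whose current
   offset is 16, (set (reg Y) (plus (reg fp) (const_int -16))) collapses to
   (set (reg Y) (reg sp)); emitting the plain move here lets the insn be
   re-recognized as a simple register copy instead of an addition.  */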
3517 if (GET_CODE (SET_SRC (old_set)) == PLUS
3518 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3519 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3520 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3521 ep++)
3522 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3523 && ep->can_eliminate)
3525 /* We must stop at the first elimination that will be used.
3526 If this one would replace the PLUS with a REG, do it
3527 now. Otherwise, quit the loop and let eliminate_regs
3528 do its normal replacement. */
3529 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3531 /* We assume here that we don't need a PARALLEL of
3532 any CLOBBERs for this assignment. There's not
3533 much we can do if we do need it. */
3534 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3535 SET_DEST (old_set),
3536 ep->to_rtx);
3537 INSN_CODE (insn) = -1;
3538 val = 1;
3539 goto done;
3542 break;
3546 old_asm_operands_vec = 0;
3548 /* Replace the body of this insn with a substituted form. If we changed
3549 something, return non-zero.
3551 If we are replacing a body that was a (set X (plus Y Z)), try to
3552 re-recognize the insn. We do this in case we had a simple addition
3553 but now can do this as a load-address. This saves an insn in this
3554 common case. */
3556 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3557 if (new_body != old_body)
3559 /* If we aren't replacing things permanently and we changed something,
3560 make another copy to ensure that all the RTL is new. Otherwise
3561 things can go wrong if find_reloads swaps commutative operands
3562 and one is inside RTL that has been copied while the other is not. */
3564 /* Don't copy an asm_operands because (1) there's no need and (2)
3565 copy_rtx can't do it properly when there are multiple outputs. */
3566 if (! replace && asm_noperands (old_body) < 0)
3567 new_body = copy_rtx (new_body);
3569 /* If we had a move insn but now we don't, rerecognize it. This will
3570 cause spurious re-recognition if the old move had a PARALLEL since
3571 the new one still will, but we can't call single_set without
3572 having put NEW_BODY into the insn and the re-recognition won't
3573 hurt in this rare case. */
3574 if (old_set != 0
3575 && ((GET_CODE (SET_SRC (old_set)) == REG
3576 && (GET_CODE (new_body) != SET
3577 || GET_CODE (SET_SRC (new_body)) != REG))
3578 /* If this was a load from or store to memory, compare
3579 the MEM in recog_operand to the one in the insn. If they
3580 are not equal, then rerecognize the insn. */
3581 || (old_set != 0
3582 && ((GET_CODE (SET_SRC (old_set)) == MEM
3583 && SET_SRC (old_set) != recog_operand[1])
3584 || (GET_CODE (SET_DEST (old_set)) == MEM
3585 && SET_DEST (old_set) != recog_operand[0])))
3586 /* If this was an add insn before, rerecognize. */
3587 || GET_CODE (SET_SRC (old_set)) == PLUS))
3589 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3590 /* If recognition fails, store the new body anyway.
3591 It's normal to have recognition failures here
3592 due to bizarre memory addresses; reloading will fix them. */
3593 PATTERN (insn) = new_body;
3595 else
3596 PATTERN (insn) = new_body;
3598 val = 1;
3601 /* Loop through all elimination pairs. See if any have changed and
3602 recalculate the number not at initial offset.
3604 Compute the maximum offset (minimum offset if the stack does not
3605 grow downward) for each elimination pair.
3607 We also detect cases where register elimination cannot be done,
3608 namely, if a register would be both changed and referenced outside a MEM
3609 in the resulting insn since such an insn is often undefined and, even if
3610 not, we cannot know what meaning will be given to it. Note that it is
3611 valid to have a register used in an address in an insn that changes it
3612 (presumably with a pre- or post-increment or decrement).
3614 If anything changes, return nonzero. */
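/* For example (illustrative): an insn that adjusts the stack pointer changes
   the offset of any sp-based elimination, so after processing it ep->offset
   differs from ep->previous_offset; VAL is then set, and if the offset also
   differs from the initial one, num_not_at_initial_offset is bumped so that
   reload_as_needed can take the displacement into account.  */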
3616 num_not_at_initial_offset = 0;
3617 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3619 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3620 ep->can_eliminate = 0;
3622 ep->ref_outside_mem = 0;
3624 if (ep->previous_offset != ep->offset)
3625 val = 1;
3627 ep->previous_offset = ep->offset;
3628 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3629 num_not_at_initial_offset++;
3631 #ifdef STACK_GROWS_DOWNWARD
3632 ep->max_offset = MAX (ep->max_offset, ep->offset);
3633 #else
3634 ep->max_offset = MIN (ep->max_offset, ep->offset);
3635 #endif
3638 done:
3639 /* If we changed something, perform elimination in REG_NOTES. This is
3640 needed even when REPLACE is zero because a REG_DEAD note might refer
3641 to a register that we eliminate and could cause a different number
3642 of spill registers to be needed in the final reload pass than in
3643 the pre-passes. */
3644 if (val && REG_NOTES (insn) != 0)
3645 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3647 if (! replace)
3648 pop_obstacks ();
3650 return val;
3653 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3654 replacement we currently believe is valid, mark it as not eliminable if X
3655 modifies DEST in any way other than by adding a constant integer to it.
3657 If DEST is the frame pointer, we do nothing because we assume that
3658 all assignments to the hard frame pointer are nonlocal gotos and are being
3659 done at a time when they are valid and do not disturb anything else.
3660 Some machines want to eliminate a fake argument pointer with either the
3661 frame or stack pointer. Assignments to the hard frame pointer must not
3662 prevent this elimination.
3664 Called via note_stores from reload before starting its passes to scan
3665 the insns of the function. */
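/* As an illustration: if sp is the target of a live elimination,
   (set (reg sp) (plus (reg sp) (const_int 16))) leaves that elimination
   usable (only its offset changes), whereas something like
   (set (reg sp) (reg R)) for an arbitrary register R makes the offset
   unknowable, so the elimination is disabled below.  */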
3667 static void
3668 mark_not_eliminable (dest, x)
3669 rtx dest;
3670 rtx x;
3672 register unsigned int i;
3674 /* A SUBREG of a hard register here is just changing its mode. We should
3675 not see a SUBREG of an eliminable hard register, but check just in
3676 case. */
3677 if (GET_CODE (dest) == SUBREG)
3678 dest = SUBREG_REG (dest);
3680 if (dest == hard_frame_pointer_rtx)
3681 return;
3683 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3684 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3685 && (GET_CODE (x) != SET
3686 || GET_CODE (SET_SRC (x)) != PLUS
3687 || XEXP (SET_SRC (x), 0) != dest
3688 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3690 reg_eliminate[i].can_eliminate_previous
3691 = reg_eliminate[i].can_eliminate = 0;
3692 num_eliminable--;
3696 /* Reset all offsets on eliminable registers to their initial values. */
3697 static void
3698 set_initial_elim_offsets ()
3700 rtx x;
3702 #ifdef ELIMINABLE_REGS
3703 struct elim_table *ep;
3705 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3707 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3708 ep->previous_offset = ep->offset
3709 = ep->max_offset = ep->initial_offset;
3711 #else
3712 #ifdef INITIAL_FRAME_POINTER_OFFSET
3713 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3714 #else
3715 if (!FRAME_POINTER_REQUIRED)
3716 abort ();
3717 reg_eliminate[0].initial_offset = 0;
3718 #endif
3719 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
3720 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3721 #endif
3723 num_not_at_initial_offset = 0;
3725 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
3727 /* Set a known offset for each forced label to be at the initial offset
3728 of each elimination. We do this because we assume that all
3729 computed jumps occur from a location where each elimination is
3730 at its initial offset. */
3732 for (x = forced_labels; x; x = XEXP (x, 1))
3733 if (XEXP (x, 0))
3734 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3737 /* See if anything that happened changes which eliminations are valid.
3738 For example, on the Sparc, whether or not the frame pointer can
3739 be eliminated can depend on what registers have been used. We need
3740 not check some conditions again (such as flag_omit_frame_pointer)
3741 since they can't have changed. */
3743 static void
3744 update_eliminables (pset)
3745 HARD_REG_SET *pset;
3747 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3748 int previous_frame_pointer_needed = frame_pointer_needed;
3749 #endif
3750 struct elim_table *ep;
3752 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3753 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3754 #ifdef ELIMINABLE_REGS
3755 || ! CAN_ELIMINATE (ep->from, ep->to)
3756 #endif
3758 ep->can_eliminate = 0;
3760 /* Look for the case where we have discovered that we can't replace
3761 register A with register B and that means that we will now be
3762 trying to replace register A with register C. This means we can
3763 no longer replace register C with register B and we need to disable
3764 such an elimination, if it exists. This occurs often with A == ap,
3765 B == sp, and C == fp. */
3767 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3769 struct elim_table *op;
3770 register int new_to = -1;
3772 if (! ep->can_eliminate && ep->can_eliminate_previous)
3774 /* Find the current elimination for ep->from, if there is a
3775 new one. */
3776 for (op = reg_eliminate;
3777 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3778 if (op->from == ep->from && op->can_eliminate)
3780 new_to = op->to;
3781 break;
3784 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3785 disable it. */
3786 for (op = reg_eliminate;
3787 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3788 if (op->from == new_to && op->to == ep->to)
3789 op->can_eliminate = 0;
3793 /* See if any registers that we thought we could eliminate the previous
3794 time are no longer eliminable. If so, something has changed and we
3795 must spill the register. Also, recompute the number of eliminable
3796 registers and see if the frame pointer is needed; it is if there is
3797 no elimination of the frame pointer that we can perform. */
3799 frame_pointer_needed = 1;
3800 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3802 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3803 && ep->to != HARD_FRAME_POINTER_REGNUM)
3804 frame_pointer_needed = 0;
3806 if (! ep->can_eliminate && ep->can_eliminate_previous)
3808 ep->can_eliminate_previous = 0;
3809 SET_HARD_REG_BIT (*pset, ep->from);
3810 num_eliminable--;
3814 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3815 /* If we didn't need a frame pointer last time, but we do now, spill
3816 the hard frame pointer. */
3817 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3818 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3819 #endif
3822 /* Initialize the table of registers to eliminate. */
3823 static void
3824 init_elim_table ()
3826 struct elim_table *ep;
3828 /* Does this function require a frame pointer? */
3830 frame_pointer_needed = (! flag_omit_frame_pointer
3831 #ifdef EXIT_IGNORE_STACK
3832 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3833 and restore sp for alloca. So we can't eliminate
3834 the frame pointer in that case. At some point,
3835 we should improve this by emitting the
3836 sp-adjusting insns for this case. */
3837 || (current_function_calls_alloca
3838 && EXIT_IGNORE_STACK)
3839 #endif
3840 || FRAME_POINTER_REQUIRED);
3842 num_eliminable = 0;
3844 #ifdef ELIMINABLE_REGS
3845 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3847 ep->can_eliminate = ep->can_eliminate_previous
3848 = (CAN_ELIMINATE (ep->from, ep->to)
3849 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3851 #else
3852 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3853 = ! frame_pointer_needed;
3854 #endif
3856 /* Count the number of eliminable registers and build the FROM and TO
3857 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3858 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3859 We depend on this. */
3860 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3862 num_eliminable += ep->can_eliminate;
3863 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3864 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3868 /* Kick all pseudos out of hard register REGNO.
3869 If GLOBAL is nonzero, try to find someplace else to put them.
3870 If DUMPFILE is nonzero, log actions taken on that file.
3872 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3873 because we found we can't eliminate some register. In that case, no pseudos
3874 are allowed to be in the register, even if they are only in a block that
3875 doesn't require spill registers, unlike the case when we are spilling this
3876 hard reg to produce another spill register.
3878 Return nonzero if any pseudos needed to be kicked out. */
3880 static int
3881 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3882 register int regno;
3883 int global;
3884 FILE *dumpfile;
3885 int cant_eliminate;
3887 enum reg_class class = REGNO_REG_CLASS (regno);
3888 int something_changed = 0;
3889 register int i;
3891 SET_HARD_REG_BIT (forbidden_regs, regno);
3893 if (cant_eliminate)
3894 regs_ever_live[regno] = 1;
3896 /* Spill every pseudo reg that was allocated to this reg
3897 or to something that overlaps this reg. */
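/* Illustrative example: a pseudo whose two-register (e.g. DImode) home starts
   at hard reg REGNO-1 overlaps REGNO even though reg_renumber for it is not
   REGNO itself; this is why the test below uses HARD_REGNO_NREGS rather than
   a simple equality check.  */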
3899 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3900 if (reg_renumber[i] >= 0
3901 && reg_renumber[i] <= regno
3902 && (reg_renumber[i]
3903 + HARD_REGNO_NREGS (reg_renumber[i],
3904 PSEUDO_REGNO_MODE (i))
3905 > regno))
3907 /* If this register belongs solely to a basic block which needed no
3908 spilling of any class that this register is contained in,
3909 leave it be, unless we are spilling this register because
3910 it was a hard register that can't be eliminated. */
3912 if (! cant_eliminate
3913 && basic_block_needs[0]
3914 && REG_BASIC_BLOCK (i) >= 0
3915 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
3917 enum reg_class *p;
3919 for (p = reg_class_superclasses[(int) class];
3920 *p != LIM_REG_CLASSES; p++)
3921 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3922 break;
3924 if (*p == LIM_REG_CLASSES)
3925 continue;
3928 /* Mark it as no longer having a hard register home. */
3929 reg_renumber[i] = -1;
3930 /* We will need to scan everything again. */
3931 something_changed = 1;
3932 if (global)
3933 retry_global_alloc (i, forbidden_regs);
3935 alter_reg (i, regno);
3937 if (reg_renumber[i] == -1)
3938 SET_REGNO_REG_SET (spilled_pseudos, i);
3940 if (dumpfile)
3942 if (reg_renumber[i] == -1)
3943 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3944 else
3945 fprintf (dumpfile, " Register %d now in %d.\n\n",
3946 i, reg_renumber[i]);
3950 return something_changed;
3953 /* Clear the contents of spilled_pseudos from the life information in all
3954 insn chains. */
3955 static void
3956 finish_spills (global, dumpfile)
3957 int global;
3958 FILE *dumpfile;
3960 struct insn_chain *chain;
3962 for (chain = reload_insn_chain; chain; chain = chain->next)
3964 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
3965 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);
3969 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3970 Also mark any hard registers used to store user variables as
3971 forbidden from being used for spill registers. */
3973 static void
3974 scan_paradoxical_subregs (x)
3975 register rtx x;
3977 register int i;
3978 register char *fmt;
3979 register enum rtx_code code = GET_CODE (x);
3981 switch (code)
3983 case REG:
3984 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3985 && REG_USERVAR_P (x))
3986 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3987 return;
3989 case CONST_INT:
3990 case CONST:
3991 case SYMBOL_REF:
3992 case LABEL_REF:
3993 case CONST_DOUBLE:
3994 case CC0:
3995 case PC:
3996 case USE:
3997 case CLOBBER:
3998 return;
4000 case SUBREG:
4001 if (GET_CODE (SUBREG_REG (x)) == REG
4002 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4003 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4004 = GET_MODE_SIZE (GET_MODE (x));
4005 return;
4007 default:
4008 break;
4011 fmt = GET_RTX_FORMAT (code);
4012 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4014 if (fmt[i] == 'e')
4015 scan_paradoxical_subregs (XEXP (x, i));
4016 else if (fmt[i] == 'E')
4018 register int j;
4019 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4020 scan_paradoxical_subregs (XVECEXP (x, i, j));
4025 static int
4026 hard_reg_use_compare (p1p, p2p)
4027 const GENERIC_PTR p1p;
4028 const GENERIC_PTR p2p;
4030 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
4031 *p2 = (struct hard_reg_n_uses *)p2p;
4032 int tem = p1->uses - p2->uses;
4033 if (tem != 0) return tem;
4034 /* If regs are equally good, sort by regno,
4035 so that the results of qsort leave nothing to chance. */
4036 return p1->regno - p2->regno;
4039 /* Choose the order to consider regs for use as reload registers
4040 based on how much trouble would be caused by spilling one.
4041 Store them in order of decreasing preference in potential_reload_regs. */
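/* Sketch of the resulting order (illustrative): completely unused hard regs
   come first (following REG_ALLOC_ORDER when it is defined, otherwise
   call-used before call-saved), then regs already holding pseudos in order of
   increasing use count; fixed and eliminable regs receive artificially huge
   counts so they end up last and in bad_spill_regs.  */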
4043 static void
4044 order_regs_for_reload ()
4046 register unsigned int i;
4047 register int o = 0;
4048 int large = 0;
4050 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
4052 CLEAR_HARD_REG_SET (bad_spill_regs);
4054 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4055 potential_reload_regs[i] = -1;
4057 /* Count number of uses of each hard reg by pseudo regs allocated to it
4058 and then order them by decreasing use. */
4060 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4062 hard_reg_n_uses[i].uses = 0;
4063 hard_reg_n_uses[i].regno = i;
4066 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4068 int regno = reg_renumber[i];
4069 if (regno >= 0)
4071 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
4072 while (regno < lim)
4073 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
4075 large += REG_N_REFS (i);
4078 /* Now fixed registers (which cannot safely be used for reloading)
4079 get a very high use count so they will be considered least desirable.
4080 Registers used explicitly in the rtl code are almost as bad. */
4082 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4084 if (fixed_regs[i])
4086 hard_reg_n_uses[i].uses += 2 * large + 2;
4087 SET_HARD_REG_BIT (bad_spill_regs, i);
4089 else if (regs_explicitly_used[i])
4091 hard_reg_n_uses[i].uses += large + 1;
4092 if (! SMALL_REGISTER_CLASSES)
4093 /* ??? We are doing this here because of the potential
4094 that bad code may be generated if a register explicitly
4095 used in an insn was used as a spill register for that
4096 insn. But not using these as spill registers may lose
4097 on some machine. We'll have to see how this works out. */
4098 SET_HARD_REG_BIT (bad_spill_regs, i);
4101 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
4102 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
4104 #ifdef ELIMINABLE_REGS
4105 /* If registers other than the frame pointer are eliminable, mark them as
4106 poor choices. */
4107 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4109 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
4110 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
4112 #endif
4114 /* Prefer registers not so far used, for use in temporary loading.
4115 Among them, if REG_ALLOC_ORDER is defined, use that order.
4116 Otherwise, prefer registers not preserved by calls. */
4118 #ifdef REG_ALLOC_ORDER
4119 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4121 int regno = reg_alloc_order[i];
4123 if (hard_reg_n_uses[regno].uses == 0)
4124 potential_reload_regs[o++] = regno;
4126 #else
4127 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4129 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
4130 potential_reload_regs[o++] = i;
4132 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4134 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
4135 potential_reload_regs[o++] = i;
4137 #endif
4139 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
4140 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
4142 /* Now add the regs that are already used,
4143 preferring those used less often. The fixed and otherwise forbidden
4144 registers will be at the end of this list. */
4146 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4147 if (hard_reg_n_uses[i].uses != 0)
4148 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4151 /* Used in reload_as_needed to sort the spilled regs. */
4153 static int
4154 compare_spill_regs (r1p, r2p)
4155 const GENERIC_PTR r1p;
4156 const GENERIC_PTR r2p;
4158 short r1 = *(short *)r1p, r2 = *(short *)r2p;
4159 return r1 - r2;
4162 /* Reload pseudo-registers into hard regs around each insn as needed.
4163 Additional register load insns are output before the insn that needs it
4164 and perhaps store insns after insns that modify the reloaded pseudo reg.
4166 reg_last_reload_reg and reg_reloaded_contents keep track of
4167 which registers are already available in reload registers.
4168 We update these for the reloads that we perform,
4169 as the insns are scanned. */
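/* Illustrative note: if pseudo P was loaded into hard reg H for a previous
   insn and nothing has invalidated H since (no intervening label, no call
   clobbering it), reg_last_reload_reg[P] still records H, so a later input
   reload of P can reuse that value instead of emitting another load.  */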
4171 static void
4172 reload_as_needed (live_known)
4173 int live_known;
4175 struct insn_chain *chain;
4176 register int i;
4177 rtx x;
4178 rtx after_call = 0;
4180 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
4181 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
4182 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4183 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
4184 reg_has_output_reload = (char *) alloca (max_regno);
4185 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4187 /* Reset all offsets on eliminable registers to their initial values. */
4188 #ifdef ELIMINABLE_REGS
4189 for (i = 0; i < (int) NUM_ELIMINABLE_REGS; i++)
4191 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
4192 reg_eliminate[i].initial_offset);
4193 reg_eliminate[i].previous_offset
4194 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
4196 #else
4197 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
4198 reg_eliminate[0].previous_offset
4199 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
4200 #endif
4202 num_not_at_initial_offset = 0;
4204 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
4205 pack registers with group needs. */
4206 if (n_spills > 1)
4208 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4209 for (i = 0; i < n_spills; i++)
4210 spill_reg_order[spill_regs[i]] = i;
4213 for (chain = reload_insn_chain; chain; chain = chain->next)
4215 rtx insn = chain->insn;
4216 rtx old_next = NEXT_INSN (insn);
4218 /* If we pass a label, copy the offsets from the label information
4219 into the current offsets of each elimination. */
4220 if (GET_CODE (insn) == CODE_LABEL)
4222 num_not_at_initial_offset = 0;
4223 for (i = 0; i < (int) NUM_ELIMINABLE_REGS; i++)
4225 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4226 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4227 if (reg_eliminate[i].can_eliminate
4228 && (reg_eliminate[i].offset
4229 != reg_eliminate[i].initial_offset))
4230 num_not_at_initial_offset++;
4234 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4236 rtx avoid_return_reg = 0;
4237 rtx oldpat = PATTERN (insn);
4239 /* Set avoid_return_reg if this is an insn
4240 that might use the value of a function call. */
4241 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4243 if (GET_CODE (PATTERN (insn)) == SET)
4244 after_call = SET_DEST (PATTERN (insn));
4245 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4246 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4247 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4248 else
4249 after_call = 0;
4251 else if (SMALL_REGISTER_CLASSES && after_call != 0
4252 && !(GET_CODE (PATTERN (insn)) == SET
4253 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
4254 && GET_CODE (PATTERN (insn)) != USE)
4256 if (reg_referenced_p (after_call, PATTERN (insn)))
4257 avoid_return_reg = after_call;
4258 after_call = 0;
4261 /* If this is a USE and CLOBBER of a MEM, ensure that any
4262 references to eliminable registers have been removed. */
4264 if ((GET_CODE (PATTERN (insn)) == USE
4265 || GET_CODE (PATTERN (insn)) == CLOBBER)
4266 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4267 XEXP (XEXP (PATTERN (insn), 0), 0)
4268 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4269 GET_MODE (XEXP (PATTERN (insn), 0)),
4270 NULL_RTX);
4272 /* If we need to do register elimination processing, do so.
4273 This might delete the insn, in which case we are done. */
4274 if (num_eliminable && chain->need_elim)
4276 eliminate_regs_in_insn (insn, 1);
4277 if (GET_CODE (insn) == NOTE)
4278 continue;
4281 /* If need_elim is nonzero but need_reload is zero, one might think
4282 that we could simply set n_reloads to 0. However, find_reloads
4283 could have done some manipulation of the insn (such as swapping
4284 commutative operands), and these manipulations are lost during
4285 the first pass for every insn that needs register elimination.
4286 So the actions of find_reloads must be redone here. */
4288 if (! chain->need_elim && ! chain->need_reload)
4289 n_reloads = 0;
4290 /* First find the pseudo regs that must be reloaded for this insn.
4291 This info is returned in the tables reload_... (see reload.h).
4292 Also modify the body of INSN by substituting RELOAD
4293 rtx's for those pseudo regs. */
4294 else
4296 bzero (reg_has_output_reload, max_regno);
4297 CLEAR_HARD_REG_SET (reg_is_output_reload);
4299 find_reloads (insn, 1, spill_indirect_levels, live_known,
4300 spill_reg_order);
4303 if (n_reloads > 0)
4305 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4306 rtx p;
4307 int class;
4309 /* If this block has not had spilling done for a
4310 particular class and we have any non-optionals that need a
4311 spill reg in that class, abort. */
4313 for (class = 0; class < N_REG_CLASSES; class++)
4314 if (basic_block_needs[class] != 0
4315 && basic_block_needs[class][chain->block] == 0)
4316 for (i = 0; i < n_reloads; i++)
4317 if (class == (int) reload_reg_class[i]
4318 && reload_reg_rtx[i] == 0
4319 && ! reload_optional[i]
4320 && (reload_in[i] != 0 || reload_out[i] != 0
4321 || reload_secondary_p[i] != 0))
4322 fatal_insn ("Non-optional registers need a spill register", insn);
4324 /* Now compute which reload regs to reload them into. Perhaps
4325 reusing reload regs from previous insns, or else output
4326 load insns to reload them. Maybe output store insns too.
4327 Record the choices of reload reg in reload_reg_rtx. */
4328 choose_reload_regs (chain, avoid_return_reg);
4330 /* Merge any reloads that we didn't combine for fear of
4331 increasing the number of spill registers needed but now
4332 discover can be safely merged. */
4333 if (SMALL_REGISTER_CLASSES)
4334 merge_assigned_reloads (insn);
4336 /* Generate the insns to reload operands into or out of
4337 their reload regs. */
4338 emit_reload_insns (chain);
4340 /* Substitute the chosen reload regs from reload_reg_rtx
4341 into the insn's body (or perhaps into the bodies of other
4342 load and store insns that we just made for reloading
4343 and that we moved the structure into). */
4344 subst_reloads ();
4346 /* If this was an ASM, make sure that all the reload insns
4347 we have generated are valid. If not, give an error
4348 and delete them. */
4350 if (asm_noperands (PATTERN (insn)) >= 0)
4351 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4352 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4353 && (recog_memoized (p) < 0
4354 || (insn_extract (p),
4355 ! constrain_operands (INSN_CODE (p), 1))))
4357 error_for_asm (insn,
4358 "`asm' operand requires impossible reload");
4359 PUT_CODE (p, NOTE);
4360 NOTE_SOURCE_FILE (p) = 0;
4361 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4364 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4365 is no longer validly lying around to save a future reload.
4366 Note that this does not detect pseudos that were reloaded
4367 for this insn in order to be stored into
4368 (obeying register constraints). That is correct; such reload
4369 registers ARE still valid. */
4370 note_stores (oldpat, forget_old_reloads_1);
4372 /* There may have been CLOBBER insns placed after INSN. So scan
4373 between INSN and NEXT and use them to forget old reloads. */
4374 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4375 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4376 note_stores (PATTERN (x), forget_old_reloads_1);
4378 #ifdef AUTO_INC_DEC
4379 /* Likewise for regs altered by auto-increment in this insn.
4380 But note that the reg-notes are not changed by reloading:
4381 they still contain the pseudo-regs, not the spill regs. */
4382 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4383 if (REG_NOTE_KIND (x) == REG_INC)
4385 /* See if this pseudo reg was reloaded in this insn.
4386 If so, its last-reload info is still valid
4387 because it is based on this insn's reload. */
4388 for (i = 0; i < n_reloads; i++)
4389 if (reload_out[i] == XEXP (x, 0))
4390 break;
4392 if (i == n_reloads)
4393 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4395 #endif
4397 /* A reload reg's contents are unknown after a label. */
4398 if (GET_CODE (insn) == CODE_LABEL)
4399 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4401 /* Don't assume a reload reg is still good after a call insn
4402 if it is a call-used reg. */
4403 else if (GET_CODE (insn) == CALL_INSN)
4404 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4406 /* In case registers overlap, allow certain insns to invalidate
4407 particular hard registers. */
4409 #ifdef INSN_CLOBBERS_REGNO_P
4410 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4411 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4412 && INSN_CLOBBERS_REGNO_P (insn, i))
4413 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
4414 #endif
4416 #ifdef USE_C_ALLOCA
4417 alloca (0);
4418 #endif
4422 /* Discard all record of any value reloaded from X,
4423 or reloaded in X from someplace else;
4424 unless X is an output reload reg of the current insn.
4426 X may be a hard reg (the reload reg)
4427 or it may be a pseudo reg that was reloaded from. */
4429 static void
4430 forget_old_reloads_1 (x, ignored)
4431 rtx x;
4432 rtx ignored ATTRIBUTE_UNUSED;
4434 register int regno;
4435 int nr;
4436 int offset = 0;
4438 /* note_stores does give us subregs of hard regs. */
4439 while (GET_CODE (x) == SUBREG)
4441 offset += SUBREG_WORD (x);
4442 x = SUBREG_REG (x);
4445 if (GET_CODE (x) != REG)
4446 return;
4448 regno = REGNO (x) + offset;
4450 if (regno >= FIRST_PSEUDO_REGISTER)
4451 nr = 1;
4452 else
4454 int i;
4455 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4456 /* Storing into a spilled-reg invalidates its contents.
4457 This can happen if a block-local pseudo is allocated to that reg
4458 and it wasn't spilled because this block's total need is 0.
4459 Then some insn might have an optional reload and use this reg. */
4460 for (i = 0; i < nr; i++)
4461 /* But don't do this if the reg actually serves as an output
4462 reload reg in the current instruction. */
4463 if (n_reloads == 0
4464 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4465 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4468 /* Since value of X has changed,
4469 forget any value previously copied from it. */
4471 while (nr-- > 0)
4472 /* But don't forget a copy if this is the output reload
4473 that establishes the copy's validity. */
4474 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4475 reg_last_reload_reg[regno + nr] = 0;
4478 /* For each reload, the mode of the reload register. */
4479 static enum machine_mode reload_mode[MAX_RELOADS];
4481 /* For each reload, the largest number of registers it will require. */
4482 static int reload_nregs[MAX_RELOADS];
4484 /* Comparison function for qsort to decide which of two reloads
4485 should be handled first. *P1 and *P2 are the reload numbers. */
4487 static int
4488 reload_reg_class_lower (r1p, r2p)
4489 const GENERIC_PTR r1p;
4490 const GENERIC_PTR r2p;
4492 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4493 register int t;
4495 /* Consider required reloads before optional ones. */
4496 t = reload_optional[r1] - reload_optional[r2];
4497 if (t != 0)
4498 return t;
4500 /* Count all solitary classes before non-solitary ones. */
4501 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4502 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4503 if (t != 0)
4504 return t;
4506 /* Aside from solitaires, consider all multi-reg groups first. */
4507 t = reload_nregs[r2] - reload_nregs[r1];
4508 if (t != 0)
4509 return t;
4511 /* Consider reloads in order of increasing reg-class number. */
4512 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4513 if (t != 0)
4514 return t;
4516 /* If reloads are equally urgent, sort by reload number,
4517 so that the results of qsort leave nothing to chance. */
4518 return r1 - r2;
4521 /* The following HARD_REG_SETs indicate when each hard register is
4522 used for a reload of various parts of the current insn. */
4524 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4525 static HARD_REG_SET reload_reg_used;
4526 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4527 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4528 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4529 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4530 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4531 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4532 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4533 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4534 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4535 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4536 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4537 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4538 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4539 static HARD_REG_SET reload_reg_used_in_op_addr;
4540 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4541 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4542 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4543 static HARD_REG_SET reload_reg_used_in_insn;
4544 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4545 static HARD_REG_SET reload_reg_used_in_other_addr;
4547 /* If reg is in use as a reload reg for any sort of reload. */
4548 static HARD_REG_SET reload_reg_used_at_all;
4550 /* If reg is in use as an inherited reload. We just mark the first register
4551 in the group. */
4552 static HARD_REG_SET reload_reg_used_for_inherit;
4554 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4555 TYPE. MODE is used to indicate how many consecutive regs are
4556 actually used. */
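/* E.g. (illustrative): on a 32-bit target, marking a DImode reload that
   starts at hard reg REGNO records both REGNO and REGNO+1 in the appropriate
   set, since HARD_REGNO_NREGS yields 2 for that mode there.  */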
4558 static void
4559 mark_reload_reg_in_use (regno, opnum, type, mode)
4560 int regno;
4561 int opnum;
4562 enum reload_type type;
4563 enum machine_mode mode;
4565 int nregs = HARD_REGNO_NREGS (regno, mode);
4566 int i;
4568 for (i = regno; i < nregs + regno; i++)
4570 switch (type)
4572 case RELOAD_OTHER:
4573 SET_HARD_REG_BIT (reload_reg_used, i);
4574 break;
4576 case RELOAD_FOR_INPUT_ADDRESS:
4577 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4578 break;
4580 case RELOAD_FOR_INPADDR_ADDRESS:
4581 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4582 break;
4584 case RELOAD_FOR_OUTPUT_ADDRESS:
4585 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4586 break;
4588 case RELOAD_FOR_OUTADDR_ADDRESS:
4589 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4590 break;
4592 case RELOAD_FOR_OPERAND_ADDRESS:
4593 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4594 break;
4596 case RELOAD_FOR_OPADDR_ADDR:
4597 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4598 break;
4600 case RELOAD_FOR_OTHER_ADDRESS:
4601 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4602 break;
4604 case RELOAD_FOR_INPUT:
4605 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4606 break;
4608 case RELOAD_FOR_OUTPUT:
4609 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4610 break;
4612 case RELOAD_FOR_INSN:
4613 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4614 break;
4617 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4621 /* Similarly, but show REGNO is no longer in use for a reload. */
4623 static void
4624 clear_reload_reg_in_use (regno, opnum, type, mode)
4625 int regno;
4626 int opnum;
4627 enum reload_type type;
4628 enum machine_mode mode;
4630 int nregs = HARD_REGNO_NREGS (regno, mode);
4631 int i;
4633 for (i = regno; i < nregs + regno; i++)
4635 switch (type)
4637 case RELOAD_OTHER:
4638 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4639 break;
4641 case RELOAD_FOR_INPUT_ADDRESS:
4642 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4643 break;
4645 case RELOAD_FOR_INPADDR_ADDRESS:
4646 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4647 break;
4649 case RELOAD_FOR_OUTPUT_ADDRESS:
4650 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4651 break;
4653 case RELOAD_FOR_OUTADDR_ADDRESS:
4654 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4655 break;
4657 case RELOAD_FOR_OPERAND_ADDRESS:
4658 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4659 break;
4661 case RELOAD_FOR_OPADDR_ADDR:
4662 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4663 break;
4665 case RELOAD_FOR_OTHER_ADDRESS:
4666 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4667 break;
4669 case RELOAD_FOR_INPUT:
4670 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4671 break;
4673 case RELOAD_FOR_OUTPUT:
4674 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4675 break;
4677 case RELOAD_FOR_INSN:
4678 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4679 break;
4684 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4685 specified by OPNUM and TYPE. */
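/* Illustrative consequence of the rules below: a register already claimed for
   a RELOAD_FOR_INPUT reload may still be reported free for a RELOAD_FOR_OUTPUT
   reload of the same insn, because inputs are consumed before the insn
   executes and outputs are written afterwards; only the categories explicitly
   tested in each case actually conflict.  */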
4687 static int
4688 reload_reg_free_p (regno, opnum, type)
4689 int regno;
4690 int opnum;
4691 enum reload_type type;
4693 int i;
4695 /* In use for a RELOAD_OTHER means it's not available for anything. */
4696 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4697 return 0;
4699 switch (type)
4701 case RELOAD_OTHER:
4702 /* In use for anything means we can't use it for RELOAD_OTHER. */
4703 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4704 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4705 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4706 return 0;
4708 for (i = 0; i < reload_n_operands; i++)
4709 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4710 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4711 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4712 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4713 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4714 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4715 return 0;
4717 return 1;
4719 case RELOAD_FOR_INPUT:
4720 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4721 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4722 return 0;
4724 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4725 return 0;
4727 /* If it is used for some other input, can't use it. */
4728 for (i = 0; i < reload_n_operands; i++)
4729 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4730 return 0;
4732 /* If it is used in a later operand's address, can't use it. */
4733 for (i = opnum + 1; i < reload_n_operands; i++)
4734 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4735 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4736 return 0;
4738 return 1;
4740 case RELOAD_FOR_INPUT_ADDRESS:
4741 /* Can't use a register if it is used for an input address for this
4742 operand or used as an input in an earlier one. */
4743 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4744 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4745 return 0;
4747 for (i = 0; i < opnum; i++)
4748 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4749 return 0;
4751 return 1;
4753 case RELOAD_FOR_INPADDR_ADDRESS:
4754 /* Can't use a register if it is used for an input address
4755 for this operand or used as an input in an earlier
4756 one. */
4757 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4758 return 0;
4760 for (i = 0; i < opnum; i++)
4761 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4762 return 0;
4764 return 1;
4766 case RELOAD_FOR_OUTPUT_ADDRESS:
4767 /* Can't use a register if it is used for an output address for this
4768 operand or used as an output in this or a later operand. */
4769 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4770 return 0;
4772 for (i = opnum; i < reload_n_operands; i++)
4773 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4774 return 0;
4776 return 1;
4778 case RELOAD_FOR_OUTADDR_ADDRESS:
4779 /* Can't use a register if it is used for an output address
4780 for this operand or used as an output in this or a
4781 later operand. */
4782 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4783 return 0;
4785 for (i = opnum; i < reload_n_operands; i++)
4786 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4787 return 0;
4789 return 1;
4791 case RELOAD_FOR_OPERAND_ADDRESS:
4792 for (i = 0; i < reload_n_operands; i++)
4793 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4794 return 0;
4796 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4797 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4799 case RELOAD_FOR_OPADDR_ADDR:
4800 for (i = 0; i < reload_n_operands; i++)
4801 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4802 return 0;
4804 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4806 case RELOAD_FOR_OUTPUT:
4807 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4808 outputs, or an output address for this or an earlier output. */
4809 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4810 return 0;
4812 for (i = 0; i < reload_n_operands; i++)
4813 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4814 return 0;
4816 for (i = 0; i <= opnum; i++)
4817 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4818 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4819 return 0;
4821 return 1;
4823 case RELOAD_FOR_INSN:
4824 for (i = 0; i < reload_n_operands; i++)
4825 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4826 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4827 return 0;
4829 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4830 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4832 case RELOAD_FOR_OTHER_ADDRESS:
4833 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4835 abort ();
4838 /* Return 1 if the value in reload reg REGNO, as used by a reload
4839 needed for the part of the insn specified by OPNUM and TYPE,
4840 is not in use for a reload in any prior part of the insn.
4842 We can assume that the reload reg was already tested for availability
4843 at the time it is needed, and we should not check this again,
4844 in case the reg has already been marked in use.
4846 However, if EQUIV is set, we are checking the availability of a register
4847 holding an equivalence to the value to be loaded into the reload register,
4848 not the availability of the reload register itself.
4850 This is still less stringent than what reload_reg_free_p checks; for
4851 example, compare the checks for RELOAD_OTHER. */
4853 static int
4854 reload_reg_free_before_p (regno, opnum, type, equiv)
4855 int regno;
4856 int opnum;
4857 enum reload_type type;
4858 int equiv;
4860 int i;
4862 switch (type)
4864 case RELOAD_FOR_OTHER_ADDRESS:
4865 /* These always come first. */
4866 if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno))
4867 return 0;
4868 return 1;
4870 case RELOAD_OTHER:
4871 if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
4872 return 0;
4873 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4875 /* If this use is for part of the insn,
4876 check that the reg is not in use for any prior part. It is tempting
4877 to try to do this by falling through from objects that occur
4878 later in the insn to ones that occur earlier, but that will not
4879 correctly take into account the fact that here we MUST ignore
4880 things that would prevent the register from being allocated in
4881 the first place, since we know that it was allocated. */
4883 case RELOAD_FOR_OUTPUT_ADDRESS:
4884 if (equiv
4885 && TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4886 return 0;
4887 /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads. */
4888 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4889 return 0;
4890 /* ... fall through ... */
4891 case RELOAD_FOR_OUTADDR_ADDRESS:
4892 if (equiv
4893 && (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)
4894 || TEST_HARD_REG_BIT (reload_reg_used, regno)))
4895 return 0;
4896 /* Earlier reloads are for earlier outputs or their addresses,
4897 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4898 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4899 RELOAD_OTHER). */
4900 for (i = 0; i < opnum; i++)
4901 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4902 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4903 return 0;
4905 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4906 return 0;
4908 for (i = 0; i < reload_n_operands; i++)
4909 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4910 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4911 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4912 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4913 return 0;
4915 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4916 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4917 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4918 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4920 case RELOAD_FOR_OUTPUT:
4921 case RELOAD_FOR_INSN:
4922 /* There is no reason to call this function for output reloads, thus
4923 anything we'd put here wouldn't be tested. So just abort. */
4924 abort ();
4926 case RELOAD_FOR_OPERAND_ADDRESS:
4927 if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4928 return 0;
4930 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4931 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4932 return 0;
4934 /* ... fall through ... */
4936 case RELOAD_FOR_OPADDR_ADDR:
4937 if (equiv)
4939 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4940 || TEST_HARD_REG_BIT (reload_reg_used, regno))
4941 return 0;
4942 for (i = 0; i < reload_n_operands; i++)
4943 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4944 return 0;
4946 /* These can't conflict with inputs, or each other, so all we have to
4947 test is input addresses and the addresses of OTHER items. */
4949 for (i = 0; i < reload_n_operands; i++)
4950 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4951 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4952 return 0;
4954 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4956 case RELOAD_FOR_INPUT:
4957 if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
4958 return 0;
4960 /* The only things earlier are the address for this and
4961 earlier inputs, other inputs (which we know we don't conflict
4962 with), and addresses of RELOAD_OTHER objects. */
4964 for (i = 0; i <= opnum; i++)
4965 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4966 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4967 return 0;
4969 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4971 case RELOAD_FOR_INPUT_ADDRESS:
4972 /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads. */
4973 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4974 return 0;
4975 /* ... fall through ... */
4976 case RELOAD_FOR_INPADDR_ADDRESS:
4977 if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
4978 return 0;
4980 /* Similarly, all we have to check is for use in earlier inputs'
4981 addresses. */
4982 for (i = 0; i < opnum; i++)
4983 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4984 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4985 return 0;
4987 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4989 abort ();
4992 /* Return 1 if the value in reload reg REGNO, as used by a reload
4993 needed for the part of the insn specified by OPNUM and TYPE,
4994 is still available in REGNO at the end of the insn.
4996 We can assume that the reload reg was already tested for availability
4997 at the time it is needed, and we should not check this again,
4998 in case the reg has already been marked in use. */
5000 static int
5001 reload_reg_reaches_end_p (regno, opnum, type)
5002 int regno;
5003 int opnum;
5004 enum reload_type type;
5006 int i;
5008 switch (type)
5010 case RELOAD_OTHER:
5011 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5012 its value must reach the end. */
5013 return 1;
5015 /* If this use is for part of the insn,
5016 its value reaches if no subsequent part uses the same register.
5017 Just like the above function, don't try to do this with lots
5018 of fallthroughs. */
5020 case RELOAD_FOR_OTHER_ADDRESS:
5021 /* Here we check for everything else, since these don't conflict
5022 with anything else and everything comes later. */
5024 for (i = 0; i < reload_n_operands; i++)
5025 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5026 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5027 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5028 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5029 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5030 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5031 return 0;
5033 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5034 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5035 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5037 case RELOAD_FOR_INPUT_ADDRESS:
5038 case RELOAD_FOR_INPADDR_ADDRESS:
5039 /* Similar, except that we check only for this and subsequent inputs
5040 and the addresses of only subsequent inputs, and we do not need
5041 to check for RELOAD_OTHER objects since they are known not to
5042 conflict. */
5044 for (i = opnum; i < reload_n_operands; i++)
5045 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5046 return 0;
5048 for (i = opnum + 1; i < reload_n_operands; i++)
5049 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5050 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5051 return 0;
5053 for (i = 0; i < reload_n_operands; i++)
5054 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5055 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5056 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5057 return 0;
5059 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5060 return 0;
5062 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5063 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5065 case RELOAD_FOR_INPUT:
5066 /* Similar to input address, except we start at the next operand for
5067 both input and input address and we do not check for
5068 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5069 would conflict. */
5071 for (i = opnum + 1; i < reload_n_operands; i++)
5072 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5073 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5074 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5075 return 0;
5077 /* ... fall through ... */
5079 case RELOAD_FOR_OPERAND_ADDRESS:
5080 /* Check outputs and their addresses. */
5082 for (i = 0; i < reload_n_operands; i++)
5083 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5084 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5085 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5086 return 0;
5088 return 1;
5090 case RELOAD_FOR_OPADDR_ADDR:
5091 for (i = 0; i < reload_n_operands; i++)
5092 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5093 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5094 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5095 return 0;
5097 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5098 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5100 case RELOAD_FOR_INSN:
5101 /* These conflict with other outputs with RELOAD_OTHER. So
5102 we need only check for output addresses. */
5104 opnum = -1;
5106 /* ... fall through ... */
5108 case RELOAD_FOR_OUTPUT:
5109 case RELOAD_FOR_OUTPUT_ADDRESS:
5110 case RELOAD_FOR_OUTADDR_ADDRESS:
5111 /* We already know these can't conflict with a later output. So the
5112 only things to check are later output addresses. */
5113 for (i = opnum + 1; i < reload_n_operands; i++)
5114 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5115 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5116 return 0;
5118 return 1;
5121 abort ();
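#if 0
/* Illustrative sketch, not compiled: the point of this predicate is to
   decide whether a reload register may be remembered for inheritance by
   later insns.  A caller of the emit_reload_insns flavor would only record
   the (hard reg, value) pair when the value survives the whole insn,
   roughly like this (R indexes a reload):  */
if (reload_reg_rtx[r] != 0
    && reload_reg_reaches_end_p (true_regnum (reload_reg_rtx[r]),
                                 reload_opnum[r], reload_when_needed[r]))
  {
    /* ... remember reload_reg_rtx[r] in the inheritance bookkeeping ... */
  }
#endif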
5124 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5125 Return 0 otherwise.
5127 This function uses the same algorithm as reload_reg_free_p above. */
5130 reloads_conflict (r1, r2)
5131 int r1, r2;
5133 enum reload_type r1_type = reload_when_needed[r1];
5134 enum reload_type r2_type = reload_when_needed[r2];
5135 int r1_opnum = reload_opnum[r1];
5136 int r2_opnum = reload_opnum[r2];
5138 /* RELOAD_OTHER conflicts with everything. */
5139 if (r2_type == RELOAD_OTHER)
5140 return 1;
5142 /* Otherwise, check conflicts differently for each type. */
5144 switch (r1_type)
5146 case RELOAD_FOR_INPUT:
5147 return (r2_type == RELOAD_FOR_INSN
5148 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5149 || r2_type == RELOAD_FOR_OPADDR_ADDR
5150 || r2_type == RELOAD_FOR_INPUT
5151 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5152 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5153 && r2_opnum > r1_opnum));
5155 case RELOAD_FOR_INPUT_ADDRESS:
5156 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5157 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5159 case RELOAD_FOR_INPADDR_ADDRESS:
5160 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5161 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5163 case RELOAD_FOR_OUTPUT_ADDRESS:
5164 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5165 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5167 case RELOAD_FOR_OUTADDR_ADDRESS:
5168 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5169 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5171 case RELOAD_FOR_OPERAND_ADDRESS:
5172 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5173 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5175 case RELOAD_FOR_OPADDR_ADDR:
5176 return (r2_type == RELOAD_FOR_INPUT
5177 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5179 case RELOAD_FOR_OUTPUT:
5180 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5181 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5182 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5183 && r2_opnum >= r1_opnum));
5185 case RELOAD_FOR_INSN:
5186 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5187 || r2_type == RELOAD_FOR_INSN
5188 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5190 case RELOAD_FOR_OTHER_ADDRESS:
5191 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5193 case RELOAD_OTHER:
5194 return 1;
5196 default:
5197 abort ();
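#if 0
/* Illustration, not compiled; the helper name is made up.  A symmetric
   "can these two reloads share a register?" test on top of the rules above
   could be written as below.  For example, with these rules an input reload
   for operand 0 conflicts with the input-address reloads of operand 1
   (those are loaded while operand 0's input register is already live), but
   an input-address reload never conflicts with the input reload of its own
   operand, since the address register dies exactly when that input is
   loaded.  */
static int
reloads_can_share_reg_p (r1, r2)
     int r1, r2;
{
  return ! reloads_conflict (r1, r2) && ! reloads_conflict (r2, r1);
}
#endif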
5201 /* Vector of reload-numbers showing the order in which the reloads should
5202 be processed. */
5203 short reload_order[MAX_RELOADS];
5205 /* Indexed by reload number, 1 if incoming value
5206 inherited from previous insns. */
5207 char reload_inherited[MAX_RELOADS];
5209 /* For an inherited reload, this is the insn the reload was inherited from,
5210 if we know it. Otherwise, this is 0. */
5211 rtx reload_inheritance_insn[MAX_RELOADS];
5213 /* If non-zero, this is a place to get the value of the reload,
5214 rather than using reload_in. */
5215 rtx reload_override_in[MAX_RELOADS];
5217 /* For each reload, the hard register number of the register used,
5218 or -1 if we did not need a register for this reload. */
5219 int reload_spill_index[MAX_RELOADS];
5221 /* Return 1 if the value in reload reg REGNO, as used by a reload
5222 needed for the part of the insn specified by OPNUM and TYPE,
5223 may be used to load VALUE into it.
5225 Other read-only reloads with the same value do not conflict
5226 unless OUT is non-zero and these other reloads have to live while
5227 output reloads live.
5229 RELOADNUM is the number of the reload we want to load this value for;
5230 a reload does not conflict with itself.
5232 The caller has to make sure that there is no conflict with the return
5233 register. */
5234 static int
5235 reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
5236 int regno;
5237 int opnum;
5238 enum reload_type type;
5239 rtx value, out;
5240 int reloadnum;
5242 int time1;
5243 int i;
5245 /* We use some pseudo 'time' value to check if the lifetimes of the
5246 new register use would overlap with that of a previous reload
5247 that is not read-only or uses a different value.
5248 The 'time' used doesn't have to be linear in any shape or form, just
5249 monotonic.
5250 Some reload types use different 'buckets' for each operand.
5251 So there are MAX_RECOG_OPERANDS different time values for each
5252 such reload type.
5253 We compute TIME1 as the time when the register for the prospective
5254 new reload ceases to be live, and TIME2 for each existing
5255 reload as the time when the reload register of that reload
5256 becomes live.
5257 Where there is little to be gained by exact lifetime calculations,
5258 we just make conservative assumptions, i.e. a longer lifetime;
5259 this is done in the 'default:' cases. */
5260 switch (type)
5262 case RELOAD_FOR_OTHER_ADDRESS:
5263 time1 = 0;
5264 break;
5265 /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5266 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5267 respectively, to the time values for these, we get distinct time
5268 values. To get distinct time values for each operand, we have to
5269 multiply opnum by at least three. We round that up to four because
5270 multiplying by four is often cheaper. */
5271 case RELOAD_FOR_INPADDR_ADDRESS:
5272 time1 = opnum * 4 + 1;
5273 break;
5274 case RELOAD_FOR_INPUT_ADDRESS:
5275 time1 = opnum * 4 + 2;
5276 break;
5277 case RELOAD_FOR_INPUT:
5278 /* All RELOAD_FOR_INPUT reloads remain live till just before the
5279 instruction is executed. */
5280 time1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3;
5281 break;
5282 /* opnum * 4 + 3 < opnum * 4 + 4
5283 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5284 case RELOAD_FOR_OUTPUT_ADDRESS:
5285 time1 = MAX_RECOG_OPERANDS * 4 + opnum;
5286 break;
5287 default:
5288 time1 = MAX_RECOG_OPERANDS * 5;
5291 for (i = 0; i < n_reloads; i++)
5293 rtx reg = reload_reg_rtx[i];
5294 if (reg && GET_CODE (reg) == REG
5295 && ((unsigned) regno - true_regnum (reg)
5296 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
5297 && i != reloadnum)
5299 if (out
5300 && reload_when_needed[i] != RELOAD_FOR_INPUT
5301 && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
5302 && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
5303 return 0;
5304 if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
5305 || reload_out[i])
5307 int time2;
5308 switch (reload_when_needed[i])
5310 case RELOAD_FOR_OTHER_ADDRESS:
5311 time2 = 0;
5312 break;
5313 case RELOAD_FOR_INPADDR_ADDRESS:
5314 time2 = reload_opnum[i] * 4 + 1;
5315 break;
5316 case RELOAD_FOR_INPUT_ADDRESS:
5317 time2 = reload_opnum[i] * 4 + 2;
5318 break;
5319 case RELOAD_FOR_INPUT:
5320 time2 = reload_opnum[i] * 4 + 3;
5321 break;
5322 case RELOAD_FOR_OUTPUT:
5323 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5324 instruction is executed. */
5325 time2 = MAX_RECOG_OPERANDS * 4;
5326 break;
5327 /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
5328 RELOAD_FOR_OUTPUT reloads, so assign it the same time value. */
5329 case RELOAD_FOR_OUTPUT_ADDRESS:
5330 time2 = MAX_RECOG_OPERANDS * 4 + reload_opnum[i];
5331 break;
5332 case RELOAD_OTHER:
5333 if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
5335 time2 = MAX_RECOG_OPERANDS * 4;
5336 break;
5338 default:
5339 time2 = 0;
5341 if (time1 >= time2)
5342 return 0;
5346 return 1;
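#if 0
/* Condensed restatement, not compiled; the helper name is made up.  This is
   the TIME1 mapping computed above: four slots per operand on the input
   side, then one slot per operand for output addresses, and everything else
   lumped into a single conservative bucket.  For a hypothetical two-input
   insn it gives, e.g.:
     RELOAD_FOR_OTHER_ADDRESS             -> 0
     RELOAD_FOR_INPADDR_ADDRESS, opnum 0  -> 1
     RELOAD_FOR_INPUT_ADDRESS,   opnum 0  -> 2
     RELOAD_FOR_INPADDR_ADDRESS, opnum 1  -> 5
     RELOAD_FOR_INPUT_ADDRESS,   opnum 1  -> 6
     RELOAD_FOR_INPUT,  any opnum         -> (MAX_RECOG_OPERANDS - 1) * 4 + 3
     RELOAD_FOR_OUTPUT_ADDRESS,  opnum 0  -> MAX_RECOG_OPERANDS * 4
   All RELOAD_FOR_INPUT reloads share one value because they all stay live
   until just before the insn.  */
static int
reload_death_time (type, opnum)
     enum reload_type type;
     int opnum;
{
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:   return 0;
    case RELOAD_FOR_INPADDR_ADDRESS: return opnum * 4 + 1;
    case RELOAD_FOR_INPUT_ADDRESS:   return opnum * 4 + 2;
    case RELOAD_FOR_INPUT:           return (MAX_RECOG_OPERANDS - 1) * 4 + 3;
    case RELOAD_FOR_OUTPUT_ADDRESS:  return MAX_RECOG_OPERANDS * 4 + opnum;
    default:                         return MAX_RECOG_OPERANDS * 5;
    }
}
#endif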
5349 /* Find a spill register to use as a reload register for reload R.
5350 LAST_RELOAD is non-zero if this is the last reload for the insn being
5351 processed.
5353 Set reload_reg_rtx[R] to the register allocated.
5355 If NOERROR is nonzero, we return 1 if successful,
5356 or 0 if we couldn't find a spill reg and we didn't change anything. */
5358 static int
5359 allocate_reload_reg (chain, r, last_reload, noerror)
5360 struct insn_chain *chain;
5361 int r;
5362 int last_reload;
5363 int noerror;
5365 rtx insn = chain->insn;
5366 int i;
5367 int pass;
5368 int count;
5369 rtx new;
5370 int regno;
5372 /* If we put this reload ahead, thinking it is a group,
5373 then insist on finding a group. Otherwise we can grab a
5374 reg that some other reload needs.
5375 (That can happen when we have a 68000 DATA_OR_FP_REG
5376 which is a group of data regs or one fp reg.)
5377 We need not be so restrictive if there are no more reloads
5378 for this insn.
5380 ??? Really it would be nicer to have smarter handling
5381 for that kind of reg class, where a problem like this is normal.
5382 Perhaps those classes should be avoided for reloading
5383 by use of more alternatives. */
5385 int force_group = reload_nregs[r] > 1 && ! last_reload;
5387 /* If we want a single register and haven't yet found one,
5388 take any reg in the right class and not in use.
5389 If we want a consecutive group, here is where we look for it.
5391 We use two passes so we can first look for reload regs to
5392 reuse, which are already in use for other reloads in this insn,
5393 and only then use additional registers.
5394 I think that maximizing reuse is needed to make sure we don't
5395 run out of reload regs. Suppose we have three reloads, and
5396 reloads A and B can share regs. These need two regs.
5397 Suppose A and B are given different regs.
5398 That leaves none for C. */
5399 for (pass = 0; pass < 2; pass++)
5401 /* I is the index in spill_regs.
5402 We advance it round-robin between insns to use all spill regs
5403 equally, so that inherited reloads have a chance
5404 of leapfrogging each other. Don't do this, however, when we have
5405 group needs and failure would be fatal; if we only have a relatively
5406 small number of spill registers, and more than one of them has
5407 group needs, then by starting in the middle, we may end up
5408 allocating the first one in such a way that we are not left with
5409 sufficient groups to handle the rest. */
5411 if (noerror || ! force_group)
5412 i = last_spill_reg;
5413 else
5414 i = -1;
5416 for (count = 0; count < n_spills; count++)
5418 int class = (int) reload_reg_class[r];
5420 i = (i + 1) % n_spills;
5422 if ((reload_reg_free_p (spill_regs[i], reload_opnum[r],
5423 reload_when_needed[r])
5424 || (reload_in[r]
5425 /* We check reload_reg_used to make sure we
5426 don't clobber the return register. */
5427 && ! TEST_HARD_REG_BIT (reload_reg_used, spill_regs[i])
5428 && reload_reg_free_for_value_p (spill_regs[i],
5429 reload_opnum[r],
5430 reload_when_needed[r],
5431 reload_in[r],
5432 reload_out[r], r)))
5433 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5434 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5435 /* Look first for regs to share, then for unshared. But
5436 don't share regs used for inherited reloads; they are
5437 the ones we want to preserve. */
5438 && (pass
5439 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5440 spill_regs[i])
5441 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5442 spill_regs[i]))))
5444 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5445 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5446 (on 68000) got us two FP regs. If NR is 1,
5447 we would reject both of them. */
5448 if (force_group)
5449 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5450 /* If we need only one reg, we have already won. */
5451 if (nr == 1)
5453 /* But reject a single reg if we demand a group. */
5454 if (force_group)
5455 continue;
5456 break;
5458 /* Otherwise check that as many consecutive regs as we need
5459 are available here.
5460 Also, don't use for a group registers that are
5461 needed for nongroups. */
5462 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5463 while (nr > 1)
5465 regno = spill_regs[i] + nr - 1;
5466 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5467 && spill_reg_order[regno] >= 0
5468 && reload_reg_free_p (regno, reload_opnum[r],
5469 reload_when_needed[r])
5470 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5471 regno)))
5472 break;
5473 nr--;
5475 if (nr == 1)
5476 break;
5480 /* If we found something on pass 1, omit pass 2. */
5481 if (count < n_spills)
5482 break;
5485 /* We should have found a spill register by now. */
5486 if (count == n_spills)
5488 if (noerror)
5489 return 0;
5490 goto failure;
5493 /* I is the index in SPILL_REG_RTX of the reload register we are to
5494 allocate. Get an rtx for it and find its register number. */
5496 new = spill_reg_rtx[i];
5498 if (new == 0 || GET_MODE (new) != reload_mode[r])
5499 spill_reg_rtx[i] = new
5500 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
5502 regno = true_regnum (new);
5504 /* Detect when the reload reg can't hold the reload mode.
5505 This used to be one `if', but the Sequent compiler can't handle that. */
5506 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5508 enum machine_mode test_mode = VOIDmode;
5509 if (reload_in[r])
5510 test_mode = GET_MODE (reload_in[r]);
5511 /* If reload_in[r] has VOIDmode, it means we will load it
5512 in whatever mode the reload reg has: to wit, reload_mode[r].
5513 We have already tested that for validity. */
5514 /* Aside from that, we need to test that the expressions
5515 to reload from or into have modes which are valid for this
5516 reload register. Otherwise the reload insns would be invalid. */
5517 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5518 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5519 if (! (reload_out[r] != 0
5520 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5522 /* The reg is OK. */
5523 last_spill_reg = i;
5525 /* Mark as in use for this insn the reload regs we use
5526 for this. */
5527 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5528 reload_when_needed[r], reload_mode[r]);
5530 reload_reg_rtx[r] = new;
5531 reload_spill_index[r] = spill_regs[i];
5532 return 1;
5536 /* The reg is not OK. */
5537 if (noerror)
5538 return 0;
5540 failure:
5541 if (asm_noperands (PATTERN (insn)) < 0)
5542 /* It's the compiler's fault. */
5543 fatal_insn ("Could not find a spill register", insn);
5545 /* It's the user's fault; the operand's mode and constraint
5546 don't match. Disable this reload so we don't crash in final. */
5547 error_for_asm (insn,
5548 "`asm' operand constraint incompatible with operand size");
5549 reload_in[r] = 0;
5550 reload_out[r] = 0;
5551 reload_reg_rtx[r] = 0;
5552 reload_optional[r] = 1;
5553 reload_secondary_p[r] = 1;
5555 return 1;
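#if 0
/* Skeleton of the search above, not compiled; candidate_ok_p and
   shared_here_p are made-up stand-ins for the tests spelled out in the real
   loop.  Pass 0 only accepts spill registers already used by another reload
   of this insn, so values get shared before fresh registers are taken;
   pass 1 accepts any acceptable register.  The walk normally starts just
   past LAST_SPILL_REG so successive insns spread their reloads over the
   whole spill set (except in the fatal group case explained above).  */
for (pass = 0; pass < 2; pass++)
  {
    i = last_spill_reg;
    for (count = 0; count < n_spills; count++)
      {
        i = (i + 1) % n_spills;
        if (candidate_ok_p (spill_regs[i], r)
            && (pass || shared_here_p (spill_regs[i])))
          break;
      }
    if (count < n_spills)
      break;                    /* Found one; skip the second pass.  */
  }
#endif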
5558 /* Assign hard reg targets for the pseudo-registers we must reload
5559 into hard regs for this insn.
5560 Also output the instructions to copy them in and out of the hard regs.
5562 For machines with register classes, we are responsible for
5563 finding a reload reg in the proper class. */
5565 static void
5566 choose_reload_regs (chain, avoid_return_reg)
5567 struct insn_chain *chain;
5568 rtx avoid_return_reg;
5570 rtx insn = chain->insn;
5571 register int i, j;
5572 int max_group_size = 1;
5573 enum reg_class group_class = NO_REGS;
5574 int inheritance;
5576 rtx save_reload_reg_rtx[MAX_RELOADS];
5577 char save_reload_inherited[MAX_RELOADS];
5578 rtx save_reload_inheritance_insn[MAX_RELOADS];
5579 rtx save_reload_override_in[MAX_RELOADS];
5580 int save_reload_spill_index[MAX_RELOADS];
5581 HARD_REG_SET save_reload_reg_used;
5582 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5583 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5584 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5585 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5586 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5587 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5588 HARD_REG_SET save_reload_reg_used_in_op_addr;
5589 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5590 HARD_REG_SET save_reload_reg_used_in_insn;
5591 HARD_REG_SET save_reload_reg_used_in_other_addr;
5592 HARD_REG_SET save_reload_reg_used_at_all;
5594 bzero (reload_inherited, MAX_RELOADS);
5595 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5596 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5598 CLEAR_HARD_REG_SET (reload_reg_used);
5599 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5600 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5601 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5602 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5603 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5605 for (i = 0; i < reload_n_operands; i++)
5607 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5608 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5609 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5610 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5611 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5612 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5615 /* Don't bother with avoiding the return reg
5616 if we have no mandatory reload that could use it. */
5617 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5619 int do_avoid = 0;
5620 int regno = REGNO (avoid_return_reg);
5621 int nregs
5622 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5623 int r;
5625 for (r = regno; r < regno + nregs; r++)
5626 if (spill_reg_order[r] >= 0)
5627 for (j = 0; j < n_reloads; j++)
5628 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5629 && (reload_in[j] != 0 || reload_out[j] != 0
5630 || reload_secondary_p[j])
5632 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5633 do_avoid = 1;
5634 if (!do_avoid)
5635 avoid_return_reg = 0;
5638 #if 0 /* Not needed, now that we can always retry without inheritance. */
5639 /* See if we have more mandatory reloads than spill regs.
5640 If so, then we cannot risk optimizations that could prevent
5641 reloads from sharing one spill register.
5643 Since we will try finding a better register than reload_reg_rtx
5644 unless it is equal to reload_in or reload_out, count such reloads. */
5647 int tem = SMALL_REGISTER_CLASSES ? (avoid_return_reg != 0) : 0;
5648 for (j = 0; j < n_reloads; j++)
5649 if (! reload_optional[j]
5650 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5651 && (reload_reg_rtx[j] == 0
5652 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5653 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5654 tem++;
5655 if (tem > n_spills)
5656 must_reuse = 1;
5658 #endif
5660 /* Don't use the subroutine call return reg for a reload
5661 if we are supposed to avoid it. */
5662 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5664 int regno = REGNO (avoid_return_reg);
5665 int nregs
5666 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5667 int r;
5669 for (r = regno; r < regno + nregs; r++)
5670 if (spill_reg_order[r] >= 0)
5671 SET_HARD_REG_BIT (reload_reg_used, r);
5674 /* In order to be certain of getting the registers we need,
5675 we must sort the reloads into order of increasing register class.
5676 Then our grabbing of reload registers will parallel the process
5677 that provided the reload registers.
5679 Also note whether any of the reloads wants a consecutive group of regs.
5680 If so, record the maximum size of the group desired and what
5681 register class contains all the groups needed by this insn. */
5683 for (j = 0; j < n_reloads; j++)
5685 reload_order[j] = j;
5686 reload_spill_index[j] = -1;
5688 reload_mode[j]
5689 = (reload_inmode[j] == VOIDmode
5690 || (GET_MODE_SIZE (reload_outmode[j])
5691 > GET_MODE_SIZE (reload_inmode[j])))
5692 ? reload_outmode[j] : reload_inmode[j];
5694 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5696 if (reload_nregs[j] > 1)
5698 max_group_size = MAX (reload_nregs[j], max_group_size);
5699 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5702 /* If we have already decided to use a certain register,
5703 don't use it in another way. */
5704 if (reload_reg_rtx[j])
5705 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5706 reload_when_needed[j], reload_mode[j]);
5709 if (n_reloads > 1)
5710 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5712 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5713 sizeof reload_reg_rtx);
5714 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5715 bcopy ((char *) reload_inheritance_insn,
5716 (char *) save_reload_inheritance_insn,
5717 sizeof reload_inheritance_insn);
5718 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5719 sizeof reload_override_in);
5720 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5721 sizeof reload_spill_index);
5722 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5723 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5724 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5725 reload_reg_used_in_op_addr);
5727 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5728 reload_reg_used_in_op_addr_reload);
5730 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5731 reload_reg_used_in_insn);
5732 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5733 reload_reg_used_in_other_addr);
5735 for (i = 0; i < reload_n_operands; i++)
5737 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5738 reload_reg_used_in_output[i]);
5739 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5740 reload_reg_used_in_input[i]);
5741 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5742 reload_reg_used_in_input_addr[i]);
5743 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5744 reload_reg_used_in_inpaddr_addr[i]);
5745 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5746 reload_reg_used_in_output_addr[i]);
5747 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5748 reload_reg_used_in_outaddr_addr[i]);
5751 /* If -O, try first with inheritance, then turning it off.
5752 If not -O, don't do inheritance.
5753 Using inheritance when not optimizing leads to paradoxes
5754 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5755 because one side of the comparison might be inherited. */
5757 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5759 /* Process the reloads in order of preference just found.
5760 Beyond this point, subregs can be found in reload_reg_rtx.
5762 This used to look for an existing reloaded home for all
5763 of the reloads, and only then perform any new reloads.
5764 But that could lose if the reloads were done out of reg-class order
5765 because a later reload with a looser constraint might have an old
5766 home in a register needed by an earlier reload with a tighter constraint.
5768 To solve this, we make two passes over the reloads, in the order
5769 described above. In the first pass we try to inherit a reload
5770 from a previous insn. If there is a later reload that needs a
5771 class that is a proper subset of the class being processed, we must
5772 also allocate a spill register during the first pass.
5774 Then make a second pass over the reloads to allocate any reloads
5775 that haven't been given registers yet. */
5777 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5779 for (j = 0; j < n_reloads; j++)
5781 register int r = reload_order[j];
5783 /* Ignore reloads that got marked inoperative. */
5784 if (reload_out[r] == 0 && reload_in[r] == 0
5785 && ! reload_secondary_p[r])
5786 continue;
5788 /* If find_reloads chose to use reload_in or reload_out as a reload
5789 register, we don't need to choose one. Otherwise, try even if it
5790 found one since we might save an insn if we find the value lying
5791 around.
5792 Try also when reload_in is a pseudo without a hard reg. */
5793 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5794 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5795 || (rtx_equal_p (reload_out[r], reload_reg_rtx[r])
5796 && GET_CODE (reload_in[r]) != MEM
5797 && true_regnum (reload_in[r]) < FIRST_PSEUDO_REGISTER)))
5798 continue;
5800 #if 0 /* No longer needed for correct operation.
5801 It might give better code, or might not; worth an experiment? */
5802 /* If this is an optional reload, we can't inherit from earlier insns
5803 until we are sure that any non-optional reloads have been allocated.
5804 The following code takes advantage of the fact that optional reloads
5805 are at the end of reload_order. */
5806 if (reload_optional[r] != 0)
5807 for (i = 0; i < j; i++)
5808 if ((reload_out[reload_order[i]] != 0
5809 || reload_in[reload_order[i]] != 0
5810 || reload_secondary_p[reload_order[i]])
5811 && ! reload_optional[reload_order[i]]
5812 && reload_reg_rtx[reload_order[i]] == 0)
5813 allocate_reload_reg (chain, reload_order[i], 0, inheritance);
5814 #endif
5816 /* First see if this pseudo is already available as reloaded
5817 for a previous insn. We cannot try to inherit for reloads
5818 that are smaller than the maximum number of registers needed
5819 for groups unless the register we would allocate cannot be used
5820 for the groups.
5822 We could check here to see if this is a secondary reload for
5823 an object that is already in a register of the desired class.
5824 This would avoid the need for the secondary reload register.
5825 But this is complex because we can't easily determine what
5826 objects might want to be loaded via this reload. So let a
5827 register be allocated here. In `emit_reload_insns' we suppress
5828 one of the loads in the case described above. */
5830 if (inheritance)
5832 register int regno = -1;
5833 enum machine_mode mode;
5835 if (reload_in[r] == 0)
5837 else if (GET_CODE (reload_in[r]) == REG)
5839 regno = REGNO (reload_in[r]);
5840 mode = GET_MODE (reload_in[r]);
5842 else if (GET_CODE (reload_in_reg[r]) == REG)
5844 regno = REGNO (reload_in_reg[r]);
5845 mode = GET_MODE (reload_in_reg[r]);
5847 else if (GET_CODE (reload_in[r]) == MEM)
5849 rtx prev = prev_nonnote_insn (insn), note;
5851 if (prev && GET_CODE (prev) == INSN
5852 && GET_CODE (PATTERN (prev)) == USE
5853 && GET_CODE (XEXP (PATTERN (prev), 0)) == REG
5854 && (REGNO (XEXP (PATTERN (prev), 0))
5855 >= FIRST_PSEUDO_REGISTER)
5856 && (note = find_reg_note (prev, REG_EQUAL, NULL_RTX))
5857 && GET_CODE (XEXP (note, 0)) == MEM)
5859 rtx addr = XEXP (XEXP (note, 0), 0);
5860 int size_diff
5861 = (GET_MODE_SIZE (GET_MODE (addr))
5862 - GET_MODE_SIZE (GET_MODE (reload_in[r])));
5863 if (size_diff >= 0
5864 && rtx_equal_p ((BYTES_BIG_ENDIAN
5865 ? plus_constant (addr, size_diff)
5866 : addr),
5867 XEXP (reload_in[r], 0)))
5869 regno = REGNO (XEXP (PATTERN (prev), 0));
5870 mode = GET_MODE (reload_in[r]);
5874 #if 0
5875 /* This won't work, since REGNO can be a pseudo reg number.
5876 Also, it takes much more hair to keep track of all the things
5877 that can invalidate an inherited reload of part of a pseudoreg. */
5878 else if (GET_CODE (reload_in[r]) == SUBREG
5879 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5880 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5881 #endif
5883 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5885 i = REGNO (reg_last_reload_reg[regno]);
5887 if (reg_reloaded_contents[i] == regno
5888 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5889 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5890 >= GET_MODE_SIZE (mode))
5891 && HARD_REGNO_MODE_OK (i, reload_mode[r])
5892 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5894 && (reload_nregs[r] == max_group_size
5895 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5897 && ((reload_reg_free_p (i, reload_opnum[r],
5898 reload_when_needed[r])
5899 && reload_reg_free_before_p (i, reload_opnum[r],
5900 reload_when_needed[r],
5902 || reload_reg_free_for_value_p (i, reload_opnum[r],
5903 reload_when_needed[r],
5904 reload_in[r],
5905 reload_out[r], r)))
5907 /* If a group is needed, verify that all the subsequent
5908 registers still have their values intact. */
5909 int nr
5910 = HARD_REGNO_NREGS (i, reload_mode[r]);
5911 int k;
5913 for (k = 1; k < nr; k++)
5914 if (reg_reloaded_contents[i + k] != regno
5915 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5916 break;
5918 if (k == nr)
5920 int i1;
5922 /* We found a register that contains the
5923 value we need. If this register is the
5924 same as an `earlyclobber' operand of the
5925 current insn, just mark it as a place to
5926 reload from since we can't use it as the
5927 reload register itself. */
5929 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5930 if (reg_overlap_mentioned_for_reload_p
5931 (reg_last_reload_reg[regno],
5932 reload_earlyclobbers[i1]))
5933 break;
5935 if (i1 != n_earlyclobbers
5936 /* Don't use it if we'd clobber a pseudo reg. */
5937 || (spill_reg_order[i] < 0
5938 && reload_out[r]
5939 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5940 /* Don't really use the inherited spill reg
5941 if we need it wider than we've got it. */
5942 || (GET_MODE_SIZE (reload_mode[r])
5943 > GET_MODE_SIZE (mode))
5944 /* If find_reloads chose reload_out as reload
5945 register, stay with it - that leaves the
5946 inherited register for subsequent reloads. */
5947 || (reload_out[r] && reload_reg_rtx[r]
5948 && rtx_equal_p (reload_out[r],
5949 reload_reg_rtx[r])))
5950 reload_override_in[r] = reg_last_reload_reg[regno];
5951 else
5953 int k;
5954 /* We can use this as a reload reg. */
5955 /* Mark the register as in use for this part of
5956 the insn. */
5957 mark_reload_reg_in_use (i,
5958 reload_opnum[r],
5959 reload_when_needed[r],
5960 reload_mode[r]);
5961 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5962 reload_inherited[r] = 1;
5963 reload_inheritance_insn[r]
5964 = reg_reloaded_insn[i];
5965 reload_spill_index[r] = i;
5966 for (k = 0; k < nr; k++)
5967 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5968 i + k);
5975 /* Here's another way to see if the value is already lying around. */
5976 if (inheritance
5977 && reload_in[r] != 0
5978 && ! reload_inherited[r]
5979 && reload_out[r] == 0
5980 && (CONSTANT_P (reload_in[r])
5981 || GET_CODE (reload_in[r]) == PLUS
5982 || GET_CODE (reload_in[r]) == REG
5983 || GET_CODE (reload_in[r]) == MEM)
5984 && (reload_nregs[r] == max_group_size
5985 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5987 register rtx equiv
5988 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5989 -1, NULL_PTR, 0, reload_mode[r]);
5990 int regno;
5992 if (equiv != 0)
5994 if (GET_CODE (equiv) == REG)
5995 regno = REGNO (equiv);
5996 else if (GET_CODE (equiv) == SUBREG)
5998 /* This must be a SUBREG of a hard register.
5999 Make a new REG since this might be used in an
6000 address and not all machines support SUBREGs
6001 there. */
6002 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
6003 equiv = gen_rtx_REG (reload_mode[r], regno);
6005 else
6006 abort ();
6009 /* If we found a spill reg, reject it unless it is free
6010 and of the desired class. */
6011 if (equiv != 0
6012 && ((spill_reg_order[regno] >= 0
6013 && ! (reload_reg_free_before_p (regno, reload_opnum[r],
6014 reload_when_needed[r], 1)
6015 || reload_reg_free_for_value_p (regno,
6016 reload_opnum[r],
6017 reload_when_needed[r],
6018 reload_in[r],
6019 reload_out[r], r)))
6020 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
6021 regno)))
6022 equiv = 0;
6024 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
6025 equiv = 0;
6027 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
6028 equiv = 0;
6030 /* We found a register that contains the value we need.
6031 If this register is the same as an `earlyclobber' operand
6032 of the current insn, just mark it as a place to reload from
6033 since we can't use it as the reload register itself. */
6035 if (equiv != 0)
6036 for (i = 0; i < n_earlyclobbers; i++)
6037 if (reg_overlap_mentioned_for_reload_p (equiv,
6038 reload_earlyclobbers[i]))
6040 reload_override_in[r] = equiv;
6041 equiv = 0;
6042 break;
6045 /* If the equiv register we have found is explicitly clobbered
6046 in the current insn, it depends on the reload type if we
6047 can use it, use it for reload_override_in, or not at all.
6048 In particular, we then can't use EQUIV for a
6049 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6051 if (equiv != 0 && regno_clobbered_p (regno, insn))
6053 switch (reload_when_needed[r])
6055 case RELOAD_FOR_OTHER_ADDRESS:
6056 case RELOAD_FOR_INPADDR_ADDRESS:
6057 case RELOAD_FOR_INPUT_ADDRESS:
6058 case RELOAD_FOR_OPADDR_ADDR:
6059 break;
6060 case RELOAD_OTHER:
6061 case RELOAD_FOR_INPUT:
6062 case RELOAD_FOR_OPERAND_ADDRESS:
6063 reload_override_in[r] = equiv;
6064 /* Fall through. */
6065 default:
6066 equiv = 0;
6067 break;
6071 /* If we found an equivalent reg, say no code need be generated
6072 to load it, and use it as our reload reg. */
6073 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
6075 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
6076 int k;
6077 reload_reg_rtx[r] = equiv;
6078 reload_inherited[r] = 1;
6080 /* If reg_reloaded_valid is not set for this register,
6081 there might be a stale spill_reg_store lying around.
6082 We must clear it, since otherwise emit_reload_insns
6083 might delete the store. */
6084 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6085 spill_reg_store[regno] = NULL_RTX;
6086 /* If any of the hard registers in EQUIV are spill
6087 registers, mark them as in use for this insn. */
6088 for (k = 0; k < nr; k++)
6090 i = spill_reg_order[regno + k];
6091 if (i >= 0)
6093 mark_reload_reg_in_use (regno, reload_opnum[r],
6094 reload_when_needed[r],
6095 reload_mode[r]);
6096 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6097 regno + k);
6103 /* If we found a register to use already, or if this is an optional
6104 reload, we are done. */
6105 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
6106 continue;
6108 #if 0 /* No longer needed for correct operation. Might or might not
6109 give better code on the average. Want to experiment? */
6111 /* See if there is a later reload that has a class different from our
6112 class that intersects our class or that requires less register
6113 than our reload. If so, we must allocate a register to this
6114 reload now, since that reload might inherit a previous reload
6115 and take the only available register in our class. Don't do this
6116 for optional reloads since they will force all previous reloads
6117 to be allocated. Also don't do this for reloads that have been
6118 turned off. */
6120 for (i = j + 1; i < n_reloads; i++)
6122 int s = reload_order[i];
6124 if ((reload_in[s] == 0 && reload_out[s] == 0
6125 && ! reload_secondary_p[s])
6126 || reload_optional[s])
6127 continue;
6129 if ((reload_reg_class[s] != reload_reg_class[r]
6130 && reg_classes_intersect_p (reload_reg_class[r],
6131 reload_reg_class[s]))
6132 || reload_nregs[s] < reload_nregs[r])
6133 break;
6136 if (i == n_reloads)
6137 continue;
6139 allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
6140 #endif
6143 /* Now allocate reload registers for anything non-optional that
6144 didn't get one yet. */
6145 for (j = 0; j < n_reloads; j++)
6147 register int r = reload_order[j];
6149 /* Ignore reloads that got marked inoperative. */
6150 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
6151 continue;
6153 /* Skip reloads that already have a register allocated or are
6154 optional. */
6155 if (reload_reg_rtx[r] != 0 || reload_optional[r])
6156 continue;
6158 if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
6159 break;
6162 /* If that loop got all the way, we have won. */
6163 if (j == n_reloads)
6164 break;
6166 #if 0
6167 fail:
6168 #endif
6169 /* Loop around and try without any inheritance. */
6170 /* First undo everything done by the failed attempt
6171 to allocate with inheritance. */
6172 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
6173 sizeof reload_reg_rtx);
6174 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
6175 sizeof reload_inherited);
6176 bcopy ((char *) save_reload_inheritance_insn,
6177 (char *) reload_inheritance_insn,
6178 sizeof reload_inheritance_insn);
6179 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
6180 sizeof reload_override_in);
6181 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
6182 sizeof reload_spill_index);
6183 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
6184 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
6185 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
6186 save_reload_reg_used_in_op_addr);
6187 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
6188 save_reload_reg_used_in_op_addr_reload);
6189 COPY_HARD_REG_SET (reload_reg_used_in_insn,
6190 save_reload_reg_used_in_insn);
6191 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
6192 save_reload_reg_used_in_other_addr);
6194 for (i = 0; i < reload_n_operands; i++)
6196 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
6197 save_reload_reg_used_in_input[i]);
6198 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
6199 save_reload_reg_used_in_output[i]);
6200 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
6201 save_reload_reg_used_in_input_addr[i]);
6202 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
6203 save_reload_reg_used_in_inpaddr_addr[i]);
6204 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
6205 save_reload_reg_used_in_output_addr[i]);
6206 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
6207 save_reload_reg_used_in_outaddr_addr[i]);
6211 /* If we thought we could inherit a reload, because it seemed that
6212 nothing else wanted the same reload register earlier in the insn,
6213 verify that assumption, now that all reloads have been assigned. */
6215 for (j = 0; j < n_reloads; j++)
6217 register int r = reload_order[j];
6219 if (reload_inherited[r] && reload_reg_rtx[r] != 0
6220 && ! (reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
6221 reload_opnum[r],
6222 reload_when_needed[r], 0)
6223 || reload_reg_free_for_value_p (true_regnum (reload_reg_rtx[r]),
6224 reload_opnum[r],
6225 reload_when_needed[r],
6226 reload_in[r],
6227 reload_out[r], r)))
6228 reload_inherited[r] = 0;
6229 /* If we can inherit a RELOAD_FOR_INPUT, then we do not need its related
6230 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads.
6231 ??? This could be extended to other reload types, but these are
6232 more tricky to handle:
6233 RELOAD_FOR_OTHER_ADDRESS reloads might have been merged, so we
6234 can't eliminate them without a check that *all* references are
6235 now unused due to inheritance.
6236 While RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_OUTADDR_ADDRESS are
6237 not merged, we can't be sure that we have eliminated the use of
6238 that particular reload if we have seen just one
6239 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS being inherited,
6240 since there might be multiple of the latter two reloads for a single
6241 operand.
6242 RELOAD_FOR_OPADDR_ADDR reloads for different operands are not
6243 merged, but might share the same register by courtesy of
6244 reload_reg_free_for_value_p. reload_reg_used_in_op_addr_reload
6245 does not differentiate by opnum, thus calling clear_reload_reg_in_use
6246 for one of these reloads would mark the register as free even though
6247 another RELOAD_FOR_OPADDR_ADDR reload might still use it. */
6248 else if (reload_inherited[r] && reload_when_needed[r] == RELOAD_FOR_INPUT)
6250 for (i = 0; i < n_reloads; i++)
6252 if ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6253 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6254 && reload_opnum[i] == reload_opnum[r]
6255 && reload_in[i] && reload_reg_rtx[i])
6257 int regno = true_regnum (reload_reg_rtx[i]);
6259 reload_in[i] = 0;
6260 if (spill_reg_order[regno] >= 0)
6261 clear_reload_reg_in_use (regno, reload_opnum[i],
6262 reload_when_needed[i],
6263 reload_mode[i]);
6264 reload_reg_rtx[i] = 0;
6265 reload_spill_index[i] = -1;
6266 remove_replacements (i);
6271 /* If we found a better place to reload from,
6272 validate it in the same fashion, if it is a reload reg. */
6273 if (reload_override_in[r]
6274 && (GET_CODE (reload_override_in[r]) == REG
6275 || GET_CODE (reload_override_in[r]) == SUBREG))
6277 int regno = true_regnum (reload_override_in[r]);
6278 if (spill_reg_order[regno] >= 0
6279 && ! reload_reg_free_before_p (regno, reload_opnum[r],
6280 reload_when_needed[r], 1))
6281 reload_override_in[r] = 0;
6285 /* Now that reload_override_in is known valid,
6286 actually override reload_in. */
6287 for (j = 0; j < n_reloads; j++)
6288 if (reload_override_in[j])
6289 reload_in[j] = reload_override_in[j];
6291 /* If this reload won't be done because it has been cancelled or is
6292 optional and not inherited, clear reload_reg_rtx so other
6293 routines (such as subst_reloads) don't get confused. */
6294 for (j = 0; j < n_reloads; j++)
6295 if (reload_reg_rtx[j] != 0
6296 && ((reload_optional[j] && ! reload_inherited[j])
6297 || (reload_in[j] == 0 && reload_out[j] == 0
6298 && ! reload_secondary_p[j])))
6300 int regno = true_regnum (reload_reg_rtx[j]);
6302 if (spill_reg_order[regno] >= 0)
6303 clear_reload_reg_in_use (regno, reload_opnum[j],
6304 reload_when_needed[j], reload_mode[j]);
6305 reload_reg_rtx[j] = 0;
6308 /* Record which pseudos and which spill regs have output reloads. */
6309 for (j = 0; j < n_reloads; j++)
6311 register int r = reload_order[j];
6313 i = reload_spill_index[r];
6315 /* I is nonneg if this reload uses a register.
6316 If reload_reg_rtx[r] is 0, this is an optional reload
6317 that we opted to ignore. */
6318 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
6319 && reload_reg_rtx[r] != 0)
6321 register int nregno = REGNO (reload_out[r]);
6322 int nr = 1;
6324 if (nregno < FIRST_PSEUDO_REGISTER)
6325 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
6327 while (--nr >= 0)
6328 reg_has_output_reload[nregno + nr] = 1;
6330 if (i >= 0)
6332 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
6333 while (--nr >= 0)
6334 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6337 if (reload_when_needed[r] != RELOAD_OTHER
6338 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6339 && reload_when_needed[r] != RELOAD_FOR_INSN)
6340 abort ();
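#if 0
/* Shape of the retry logic in this function, not compiled; save_state,
   restore_state and try_reload are made-up stand-ins for the
   bcopy/COPY_HARD_REG_SET bookkeeping and the allocate_reload_reg calls
   above.  One attempt is made with inheritance, and if any mandatory reload
   fails to get a register, the saved state is restored and the whole
   allocation is redone with inheritance turned off.  */
save_state ();
for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
  {
    for (j = 0; j < n_reloads; j++)
      if (! try_reload (chain, reload_order[j], inheritance))
        break;
    if (j == n_reloads)
      break;                    /* Every reload got a register.  */
    restore_state ();           /* Undo the failed attempt and retry.  */
  }
#endif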
6345 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6346 reloads of the same item for fear that we might not have enough reload
6347 registers. However, normally they will get the same reload register
6348 and hence actually need not be loaded twice.
6350 Here we check for the most common case of this phenomenon: when we have
6351 a number of reloads for the same object, each of which was allocated
6352 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6353 reload, and is not modified in the insn itself. If we find such,
6354 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6355 This will not increase the number of spill registers needed and will
6356 prevent redundant code. */
6358 static void
6359 merge_assigned_reloads (insn)
6360 rtx insn;
6362 int i, j;
6364 /* Scan all the reloads looking for ones that only load values and
6365 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6366 assigned and not modified by INSN. */
6368 for (i = 0; i < n_reloads; i++)
6370 int conflicting_input = 0;
6371 int max_input_address_opnum = -1;
6372 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6374 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6375 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6376 || reg_set_p (reload_reg_rtx[i], insn))
6377 continue;
6379 /* Look at all other reloads. Ensure that the only use of this
6380 reload_reg_rtx is in a reload that just loads the same value
6381 as we do. Note that any secondary reloads must be of the identical
6382 class since the values, modes, and result registers are the
6383 same, so we need not do anything with any secondary reloads. */
6385 for (j = 0; j < n_reloads; j++)
6387 if (i == j || reload_reg_rtx[j] == 0
6388 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6389 reload_reg_rtx[i]))
6390 continue;
6392 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6393 && reload_opnum[j] > max_input_address_opnum)
6394 max_input_address_opnum = reload_opnum[j];
6396 /* If the reload regs aren't exactly the same (e.g., different modes)
6397 or if the values are different, we can't merge this reload.
6398 But if it is an input reload, we might still merge
6399 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6401 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6402 || reload_out[j] != 0 || reload_in[j] == 0
6403 || ! rtx_equal_p (reload_in[i], reload_in[j]))
6405 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6406 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6407 || reload_opnum[i] > reload_opnum[j])
6408 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6409 break;
6410 conflicting_input = 1;
6411 if (min_conflicting_input_opnum > reload_opnum[j])
6412 min_conflicting_input_opnum = reload_opnum[j];
6416 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6417 we, in fact, found any matching reloads. */
6419 if (j == n_reloads
6420 && max_input_address_opnum <= min_conflicting_input_opnum)
6422 for (j = 0; j < n_reloads; j++)
6423 if (i != j && reload_reg_rtx[j] != 0
6424 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6425 && (! conflicting_input
6426 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6427 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
6429 reload_when_needed[i] = RELOAD_OTHER;
6430 reload_in[j] = 0;
6431 reload_spill_index[j] = -1;
6432 transfer_replacements (i, j);
6435 /* If this is now RELOAD_OTHER, look for any reloads that load
6436 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6437 if they were for inputs, RELOAD_OTHER for outputs. Note that
6438 this test is equivalent to looking for reloads for this operand
6439 number. */
6441 if (reload_when_needed[i] == RELOAD_OTHER)
6442 for (j = 0; j < n_reloads; j++)
6443 if (reload_in[j] != 0
6444 && reload_when_needed[i] != RELOAD_OTHER
6445 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6446 reload_in[i]))
6447 reload_when_needed[j]
6448 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6449 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6450 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
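#if 0
/* Core of the merge test above, restated; not compiled, and the helper name
   is made up.  Reload J can be folded into reload I when both are pure
   input reloads of the same value through the same reload register.  The
   additional requirements checked above (reload type, no modification of
   the reload register by the insn) and the special cases for
   RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OTHER_ADDRESS reloads are left out
   of this sketch.  */
static int
simple_mergeable_p (i, j)
     int i, j;
{
  return (i != j
          && reload_in[i] != 0 && reload_in[j] != 0
          && reload_out[i] == 0 && reload_out[j] == 0
          && reload_reg_rtx[i] != 0
          && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
          && rtx_equal_p (reload_in[i], reload_in[j]));
}
#endif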
6456 /* Output insns to reload values in and out of the chosen reload regs. */
6458 static void
6459 emit_reload_insns (chain)
6460 struct insn_chain *chain;
6462 rtx insn = chain->insn;
6464 register int j;
6465 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6466 rtx other_input_address_reload_insns = 0;
6467 rtx other_input_reload_insns = 0;
6468 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6469 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6470 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6471 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6472 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6473 rtx operand_reload_insns = 0;
6474 rtx other_operand_reload_insns = 0;
6475 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6476 rtx following_insn = NEXT_INSN (insn);
6477 rtx before_insn = PREV_INSN (insn);
6478 int special;
6479 /* Values to be put in spill_reg_store are put here first. */
6480 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6481 HARD_REG_SET reg_reloaded_died;
6483 CLEAR_HARD_REG_SET (reg_reloaded_died);
6485 for (j = 0; j < reload_n_operands; j++)
6486 input_reload_insns[j] = input_address_reload_insns[j]
6487 = inpaddr_address_reload_insns[j]
6488 = output_reload_insns[j] = output_address_reload_insns[j]
6489 = outaddr_address_reload_insns[j]
6490 = other_output_reload_insns[j] = 0;
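#if 0
/* Sketch of how the buckets initialized above are used; not compiled and
   simplified (the real code goes through gen_reload rather than a plain
   move).  Each reload insn is generated into the sequence bucket matching
   its reload type, and once every reload has been processed the buckets are
   emitted around INSN in the order in which the reload types take effect.
   Generating into a bucket looks roughly like this:  */
push_to_sequence (input_reload_insns[reload_opnum[j]]);
emit_insn (gen_move_insn (reloadreg, oldequiv));
input_reload_insns[reload_opnum[j]] = get_insns ();
end_sequence ();
#endif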
6492 /* Now output the instructions to copy the data into and out of the
6493 reload registers. Do these in the order that the reloads were reported,
6494 since reloads of base and index registers precede reloads of operands
6495 and the operands may need the base and index registers reloaded. */
6497 for (j = 0; j < n_reloads; j++)
6499 register rtx old;
6500 rtx oldequiv_reg = 0;
6501 rtx this_reload_insn = 0;
6502 int expect_occurrences = 1;
6504 if (reload_spill_index[j] >= 0)
6505 new_spill_reg_store[reload_spill_index[j]] = 0;
6507 old = reload_in[j];
6508 if (old != 0 && ! reload_inherited[j]
6509 && ! rtx_equal_p (reload_reg_rtx[j], old)
6510 && reload_reg_rtx[j] != 0)
6512 register rtx reloadreg = reload_reg_rtx[j];
6513 rtx oldequiv = 0;
6514 enum machine_mode mode;
6515 rtx *where;
6517 /* Determine the mode to reload in.
6518 This is very tricky because we have three to choose from.
6519 There is the mode the insn operand wants (reload_inmode[J]).
6520 There is the mode of the reload register RELOADREG.
6521 There is the intrinsic mode of the operand, which we could find
6522 by stripping some SUBREGs.
6523 It turns out that RELOADREG's mode is irrelevant:
6524 we can change that arbitrarily.
6526 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6527 then the reload reg may not support QImode moves, so use SImode.
6528 If foo is in memory due to spilling a pseudo reg, this is safe,
6529 because the QImode value is in the least significant part of a
6530 slot big enough for an SImode value. If foo is some other sort of
6531 memory reference, then it is impossible to reload this case,
6532 so previous passes had better make sure this never happens.
6534 Then consider a one-word union which has SImode and one of its
6535 members is a float, being fetched as (SUBREG:SF union:SI).
6536 We must fetch that as SFmode because we could be loading into
6537 a float-only register. In this case OLD's mode is correct.
6539 Consider an immediate integer: it has VOIDmode. Here we need
6540 to get a mode from something else.
6542 In some cases, there is a fourth mode, the operand's
6543 containing mode. If the insn specifies a containing mode for
6544 this operand, it overrides all others.
6546 I am not sure whether the algorithm here is always right,
6547 but it does the right things in those cases. */
6549 mode = GET_MODE (old);
6550 if (mode == VOIDmode)
6551 mode = reload_inmode[j];
6553 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6554 /* If we need a secondary register for this operation, see if
6555 the value is already in a register in that class. Don't
6556 do this if the secondary register will be used as a scratch
6557 register. */
6559 if (reload_secondary_in_reload[j] >= 0
6560 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6561 && optimize)
6562 oldequiv
6563 = find_equiv_reg (old, insn,
6564 reload_reg_class[reload_secondary_in_reload[j]],
6565 -1, NULL_PTR, 0, mode);
6566 #endif
6568 /* If reloading from memory, see if there is a register
6569 that already holds the same value. If so, reload from there.
6570 We can pass 0 as the reload_reg_p argument because
6571 any other reload has either already been emitted,
6572 in which case find_equiv_reg will see the reload-insn,
6573 or has yet to be emitted, in which case it doesn't matter
6574 because we will use this equiv reg right away. */
6576 if (oldequiv == 0 && optimize
6577 && (GET_CODE (old) == MEM
6578 || (GET_CODE (old) == REG
6579 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6580 && reg_renumber[REGNO (old)] < 0)))
6581 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6582 -1, NULL_PTR, 0, mode);
6584 if (oldequiv)
6586 int regno = true_regnum (oldequiv);
6588 /* If OLDEQUIV is a spill register, don't use it for this
6589 if any other reload needs it at an earlier stage of this insn
6590 or at this stage. */
6591 if (spill_reg_order[regno] >= 0
6592 && (! reload_reg_free_p (regno, reload_opnum[j],
6593 reload_when_needed[j])
6594 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6595 reload_when_needed[j], 1)))
6596 oldequiv = 0;
6598 /* If OLDEQUIV is not a spill register,
6599 don't use it if any other reload wants it. */
6600 if (spill_reg_order[regno] < 0)
6602 int k;
6603 for (k = 0; k < n_reloads; k++)
6604 if (reload_reg_rtx[k] != 0 && k != j
6605 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6606 oldequiv))
6608 oldequiv = 0;
6609 break;
6613 /* If it is no cheaper to copy from OLDEQUIV into the
6614 reload register than it would be to move from memory,
6615 don't use it. Likewise, if we need a secondary register
6616 or memory. */
6618 if (oldequiv != 0
6619 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6620 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6621 reload_reg_class[j])
6622 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
6623 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6624 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6625 mode, oldequiv)
6626 != NO_REGS)
6627 #endif
6628 #ifdef SECONDARY_MEMORY_NEEDED
6629 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6630 reload_reg_class[j],
6631 mode)
6632 #endif
6634 oldequiv = 0;
6637 if (oldequiv == 0)
6638 oldequiv = old;
6639 else if (GET_CODE (oldequiv) == REG)
6640 oldequiv_reg = oldequiv;
6641 else if (GET_CODE (oldequiv) == SUBREG)
6642 oldequiv_reg = SUBREG_REG (oldequiv);
6644 /* If we are reloading from a register that was recently stored in
6645 with an output-reload, see if we can prove there was
6646 actually no need to store the old value in it. */
6648 if (optimize && GET_CODE (oldequiv) == REG
6649 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6650 && spill_reg_store[REGNO (oldequiv)]
6651 && GET_CODE (old) == REG && dead_or_set_p (insn, old)
6652 /* This is unsafe if the operand occurs more than once in the current
6653 insn. Perhaps some occurrences weren't reloaded. */
6654 && count_occurrences (PATTERN (insn), old) == 1)
6655 delete_output_reload (insn, j, spill_reg_store[REGNO (oldequiv)]);
6657 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6658 then load RELOADREG from OLDEQUIV. Note that we cannot use
6659 gen_lowpart_common since it can do the wrong thing when
6660 RELOADREG has a multi-word mode. Note that RELOADREG
6661 must always be a REG here. */
6663 if (GET_MODE (reloadreg) != mode)
6664 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6665 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6666 oldequiv = SUBREG_REG (oldequiv);
6667 if (GET_MODE (oldequiv) != VOIDmode
6668 && mode != GET_MODE (oldequiv))
6669 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
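/* For example (hypothetical register numbers): if the reload was recorded
   in HImode but RELOADREG was allocated as (reg:SI 3), the code above
   rewrites it as (reg:HI 3); and if OLDEQUIV is (reg:SI 4), it is wrapped
   as (subreg:HI (reg:SI 4) 0), so that both sides of the copy we are about
   to emit agree on MODE.  */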
6671 /* Switch to the right place to emit the reload insns. */
6672 switch (reload_when_needed[j])
6674 case RELOAD_OTHER:
6675 where = &other_input_reload_insns;
6676 break;
6677 case RELOAD_FOR_INPUT:
6678 where = &input_reload_insns[reload_opnum[j]];
6679 break;
6680 case RELOAD_FOR_INPUT_ADDRESS:
6681 where = &input_address_reload_insns[reload_opnum[j]];
6682 break;
6683 case RELOAD_FOR_INPADDR_ADDRESS:
6684 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6685 break;
6686 case RELOAD_FOR_OUTPUT_ADDRESS:
6687 where = &output_address_reload_insns[reload_opnum[j]];
6688 break;
6689 case RELOAD_FOR_OUTADDR_ADDRESS:
6690 where = &outaddr_address_reload_insns[reload_opnum[j]];
6691 break;
6692 case RELOAD_FOR_OPERAND_ADDRESS:
6693 where = &operand_reload_insns;
6694 break;
6695 case RELOAD_FOR_OPADDR_ADDR:
6696 where = &other_operand_reload_insns;
6697 break;
6698 case RELOAD_FOR_OTHER_ADDRESS:
6699 where = &other_input_address_reload_insns;
6700 break;
6701 default:
6702 abort ();
6705 push_to_sequence (*where);
6706 special = 0;
6708 /* Auto-increment addresses must be reloaded in a special way. */
6709 if (GET_CODE (oldequiv) == POST_INC
6710 || GET_CODE (oldequiv) == POST_DEC
6711 || GET_CODE (oldequiv) == PRE_INC
6712 || GET_CODE (oldequiv) == PRE_DEC)
6714 /* We are not going to bother supporting the case where an
6715 incremented register can't be copied directly from
6716 OLDEQUIV, since this seems highly unlikely. */
6717 if (reload_secondary_in_reload[j] >= 0)
6718 abort ();
6719 /* Prevent normal processing of this reload. */
6720 special = 1;
6721 /* Output a special code sequence for this case. */
6722 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
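/* As a concrete (hypothetical) case: if OLDEQUIV is
   (post_inc:SI (reg:SI 5)) and reload_inc[j] is 4, inc_for_reload
   (defined further down in this file) copies (reg:SI 5) into RELOADREG
   and then arranges for register 5 to be incremented by 4, so no
   ordinary load of RELOADREG is emitted here.  */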
6725 /* If we are reloading a pseudo-register that was set by the previous
6726 insn, see if we can get rid of that pseudo-register entirely
6727 by redirecting the previous insn into our reload register. */
6729 else if (optimize && GET_CODE (old) == REG
6730 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6731 && dead_or_set_p (insn, old)
6732 /* This is unsafe if some other reload
6733 uses the same reg first. */
6734 && reload_reg_free_before_p (REGNO (reloadreg),
6735 reload_opnum[j],
6736 reload_when_needed[j], 0))
6738 rtx temp = PREV_INSN (insn);
6739 while (temp && GET_CODE (temp) == NOTE)
6740 temp = PREV_INSN (temp);
6741 if (temp
6742 && GET_CODE (temp) == INSN
6743 && GET_CODE (PATTERN (temp)) == SET
6744 && SET_DEST (PATTERN (temp)) == old
6745 /* Make sure we can access insn_operand_constraint. */
6746 && asm_noperands (PATTERN (temp)) < 0
6747 /* This is unsafe if prev insn rejects our reload reg. */
6748 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6749 reloadreg)
6750 /* This is unsafe if operand occurs more than once in current
6751 insn. Perhaps some occurrences aren't reloaded. */
6752 && count_occurrences (PATTERN (insn), old) == 1
6753 /* Don't risk splitting a matching pair of operands. */
6754 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6756 /* Store into the reload register instead of the pseudo. */
6757 SET_DEST (PATTERN (temp)) = reloadreg;
6758 /* If these are the only uses of the pseudo reg,
6759 pretend for GDB it lives in the reload reg we used. */
6760 if (REG_N_DEATHS (REGNO (old)) == 1
6761 && REG_N_SETS (REGNO (old)) == 1)
6763 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6764 alter_reg (REGNO (old), -1);
6766 special = 1;
6770 /* We can't do that, so output an insn to load RELOADREG. */
6772 if (! special)
6774 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6775 rtx second_reload_reg = 0;
6776 enum insn_code icode;
6778 /* If we have a secondary reload, pick up the secondary register
6779 and icode, if any. If OLDEQUIV and OLD are different or
6780 if this is an in-out reload, recompute whether or not we
6781 still need a secondary register and what the icode should
6782 be. If we still need a secondary register and the class or
6783 icode is different, go back to reloading from OLD if using
6784 OLDEQUIV means that we got the wrong type of register. We
6785 cannot have different class or icode due to an in-out reload
6786 because we don't make such reloads when both the input and
6787 output need secondary reload registers. */
6789 if (reload_secondary_in_reload[j] >= 0)
6791 int secondary_reload = reload_secondary_in_reload[j];
6792 rtx real_oldequiv = oldequiv;
6793 rtx real_old = old;
6795 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6796 and similarly for OLD.
6797 See comments in get_secondary_reload in reload.c. */
6798 if (GET_CODE (oldequiv) == REG
6799 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6800 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6801 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6803 if (GET_CODE (old) == REG
6804 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6805 && reg_equiv_mem[REGNO (old)] != 0)
6806 real_old = reg_equiv_mem[REGNO (old)];
6808 second_reload_reg = reload_reg_rtx[secondary_reload];
6809 icode = reload_secondary_in_icode[j];
6811 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6812 || (reload_in[j] != 0 && reload_out[j] != 0))
6814 enum reg_class new_class
6815 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6816 mode, real_oldequiv);
6818 if (new_class == NO_REGS)
6819 second_reload_reg = 0;
6820 else
6822 enum insn_code new_icode;
6823 enum machine_mode new_mode;
6825 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6826 REGNO (second_reload_reg)))
6827 oldequiv = old, real_oldequiv = real_old;
6828 else
6830 new_icode = reload_in_optab[(int) mode];
6831 if (new_icode != CODE_FOR_nothing
6832 && ((insn_operand_predicate[(int) new_icode][0]
6833 && ! ((*insn_operand_predicate[(int) new_icode][0])
6834 (reloadreg, mode)))
6835 || (insn_operand_predicate[(int) new_icode][1]
6836 && ! ((*insn_operand_predicate[(int) new_icode][1])
6837 (real_oldequiv, mode)))))
6838 new_icode = CODE_FOR_nothing;
6840 if (new_icode == CODE_FOR_nothing)
6841 new_mode = mode;
6842 else
6843 new_mode = insn_operand_mode[(int) new_icode][2];
6845 if (GET_MODE (second_reload_reg) != new_mode)
6847 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6848 new_mode))
6849 oldequiv = old, real_oldequiv = real_old;
6850 else
6851 second_reload_reg
6852 = gen_rtx_REG (new_mode,
6853 REGNO (second_reload_reg));
6859 /* If we still need a secondary reload register, check
6860 to see if it is being used as a scratch or intermediate
6861 register and generate code appropriately. If we need
6862 a scratch register, use REAL_OLDEQUIV since the form of
6863 the insn may depend on the actual address if it is
6864 a MEM. */
6866 if (second_reload_reg)
6868 if (icode != CODE_FOR_nothing)
6870 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6871 second_reload_reg));
6872 special = 1;
6874 else
6876 /* See if we need a scratch register to load the
6877 intermediate register (a tertiary reload). */
6878 enum insn_code tertiary_icode
6879 = reload_secondary_in_icode[secondary_reload];
6881 if (tertiary_icode != CODE_FOR_nothing)
6883 rtx third_reload_reg
6884 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6886 emit_insn ((GEN_FCN (tertiary_icode)
6887 (second_reload_reg, real_oldequiv,
6888 third_reload_reg)));
6890 else
6891 gen_reload (second_reload_reg, oldequiv,
6892 reload_opnum[j],
6893 reload_when_needed[j]);
6895 oldequiv = second_reload_reg;
6899 #endif
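/* To restate the two shapes the code above produces (a summary, not new
   behavior): when ICODE is a usable reload_in pattern, the secondary
   register is only a scratch, so the single
	GEN_FCN (icode) (reloadreg, real_oldequiv, second_reload_reg)
   insn loads RELOADREG and SPECIAL suppresses any further copy.
   Otherwise SECOND_RELOAD_REG is an intermediate: it is loaded from
   OLDEQUIV here (possibly with a tertiary scratch), and the ordinary
   gen_reload call just below then moves it into RELOADREG.  */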
6901 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6902 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6903 reload_when_needed[j]);
6905 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6906 /* We may have to make a REG_DEAD note for the secondary reload
6907 register in the insns we just made. Find the last insn that
6908 mentioned the register. */
6909 if (! special && second_reload_reg
6910 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6912 rtx prev;
6914 for (prev = get_last_insn (); prev;
6915 prev = PREV_INSN (prev))
6916 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6917 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6918 PATTERN (prev)))
6920 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
6921 second_reload_reg,
6922 REG_NOTES (prev));
6923 break;
6926 #endif
6929 this_reload_insn = get_last_insn ();
6930 /* End this sequence. */
6931 *where = get_insns ();
6932 end_sequence ();
6935 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6936 e.g. inheriting a SImode output reload for
6937 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6938 if (optimize && reload_inherited[j] && reload_in[j]
6939 && GET_CODE (reload_in[j]) == MEM
6940 && reload_spill_index[j] >= 0
6941 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6943 expect_occurrences
6944 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6945 reload_in[j]
6946 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6948 /* Add a note saying the input reload reg
6949 dies in this insn, if anyone cares. */
6950 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6951 if (old != 0
6952 && reload_reg_rtx[j] != old
6953 && reload_reg_rtx[j] != 0
6954 && reload_out[j] == 0
6955 && ! reload_inherited[j]
6956 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6958 register rtx reloadreg = reload_reg_rtx[j];
6960 #if 0
6961 /* We can't abort here because we need to support this for sched.c.
6962 It's not terrible to miss a REG_DEAD note, but we should try
6963 to figure out how to do this correctly. */
6964 /* The code below is incorrect for address-only reloads. */
6965 if (reload_when_needed[j] != RELOAD_OTHER
6966 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6967 abort ();
6968 #endif
6970 /* Add a death note to this insn, for an input reload. */
6972 if ((reload_when_needed[j] == RELOAD_OTHER
6973 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6974 && ! dead_or_set_p (insn, reloadreg))
6975 REG_NOTES (insn)
6976 = gen_rtx_EXPR_LIST (REG_DEAD,
6977 reloadreg, REG_NOTES (insn));
6980 /* When we inherit a reload, the last marked death of the reload reg
6981 may no longer really be a death. */
6982 if (reload_reg_rtx[j] != 0
6983 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6984 && reload_inherited[j])
6986 /* Handle inheriting an output reload.
6987 Remove the death note from the output reload insn. */
6988 if (reload_spill_index[j] >= 0
6989 && GET_CODE (reload_in[j]) == REG
6990 && spill_reg_store[reload_spill_index[j]] != 0
6991 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6992 REG_DEAD, REGNO (reload_reg_rtx[j])))
6993 remove_death (REGNO (reload_reg_rtx[j]),
6994 spill_reg_store[reload_spill_index[j]]);
6995 /* Likewise for input reloads that were inherited. */
6996 else if (reload_spill_index[j] >= 0
6997 && GET_CODE (reload_in[j]) == REG
6998 && spill_reg_store[reload_spill_index[j]] == 0
6999 && reload_inheritance_insn[j] != 0
7000 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
7001 REGNO (reload_reg_rtx[j])))
7002 remove_death (REGNO (reload_reg_rtx[j]),
7003 reload_inheritance_insn[j]);
7004 else
7006 rtx prev;
7008 /* We got this register from find_equiv_reg.
7009 Search back for its last death note and get rid of it.
7010 But don't search back too far.
7011 Don't go past a place where this reg is set,
7012 since a death note before that remains valid. */
7013 for (prev = PREV_INSN (insn);
7014 prev && GET_CODE (prev) != CODE_LABEL;
7015 prev = PREV_INSN (prev))
7016 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
7017 && dead_or_set_p (prev, reload_reg_rtx[j]))
7019 if (find_regno_note (prev, REG_DEAD,
7020 REGNO (reload_reg_rtx[j])))
7021 remove_death (REGNO (reload_reg_rtx[j]), prev);
7022 break;
7027 /* We might have used find_equiv_reg above to choose an alternate
7028 place from which to reload. If so, and it died, we need to remove
7029 that death and move it to one of the insns we just made. */
7031 if (oldequiv_reg != 0
7032 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
7034 rtx prev, prev1;
7036 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
7037 prev = PREV_INSN (prev))
7038 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
7039 && dead_or_set_p (prev, oldequiv_reg))
7041 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
7043 for (prev1 = this_reload_insn;
7044 prev1; prev1 = PREV_INSN (prev1))
7045 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
7046 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
7047 PATTERN (prev1)))
7049 REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
7050 oldequiv_reg,
7051 REG_NOTES (prev1));
7052 break;
7054 remove_death (REGNO (oldequiv_reg), prev);
7056 break;
7059 #endif
7061 /* If we are reloading a register that was recently stored in with an
7062 output-reload, see if we can prove there was
7063 actually no need to store the old value in it. */
7065 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
7066 && reload_in[j] != 0
7067 && GET_CODE (reload_in[j]) == REG
7068 #if 0
7069 /* There doesn't seem to be any reason to restrict this to pseudos
7070 and doing so loses in the case where we are copying from a
7071 register of the wrong class. */
7072 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
7073 #endif
7074 && spill_reg_store[reload_spill_index[j]] != 0
7075 /* This is unsafe if some other reload uses the same reg first. */
7076 && reload_reg_free_before_p (reload_spill_index[j],
7077 reload_opnum[j], reload_when_needed[j],
7079 && dead_or_set_p (insn, reload_in[j])
7080 /* This is unsafe if operand occurs more than once in current
7081 insn. Perhaps some occurrences weren't reloaded. */
7082 && (count_occurrences (PATTERN (insn), reload_in[j])
7083 == expect_occurrences))
7084 delete_output_reload (insn, j,
7085 spill_reg_store[reload_spill_index[j]]);
7087 /* Input-reloading is done. Now do output-reloading,
7088 storing the value from the reload-register after the main insn
7089 if reload_out[j] is nonzero.
7091 ??? At some point we need to support handling output reloads of
7092 JUMP_INSNs or insns that set cc0. */
7093 old = reload_out[j];
7094 if (old != 0
7095 && reload_reg_rtx[j] != old
7096 && reload_reg_rtx[j] != 0)
7098 register rtx reloadreg = reload_reg_rtx[j];
7099 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7100 register rtx second_reloadreg = 0;
7101 #endif
7102 rtx note, p;
7103 enum machine_mode mode;
7104 int special = 0;
7106 /* An output operand that dies right away does need a reload,
7107 but need not be copied from it. Show the new location in the
7108 REG_UNUSED note. */
7109 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7110 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7112 XEXP (note, 0) = reload_reg_rtx[j];
7113 continue;
7115 /* Likewise for a SUBREG of an operand that dies. */
7116 else if (GET_CODE (old) == SUBREG
7117 && GET_CODE (SUBREG_REG (old)) == REG
7118 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7119 SUBREG_REG (old))))
7121 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7122 reload_reg_rtx[j]);
7123 continue;
7125 else if (GET_CODE (old) == SCRATCH)
7126 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7127 but we don't want to make an output reload. */
7128 continue;
7130 #if 0
7131 /* Strip off of OLD any size-increasing SUBREGs such as
7132 (SUBREG:SI foo:QI 0). */
7134 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7135 && (GET_MODE_SIZE (GET_MODE (old))
7136 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7137 old = SUBREG_REG (old);
7138 #endif
7140 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
7141 if (GET_CODE (insn) == JUMP_INSN)
7142 abort ();
7144 if (reload_when_needed[j] == RELOAD_OTHER)
7145 start_sequence ();
7146 else
7147 push_to_sequence (output_reload_insns[reload_opnum[j]]);
7149 /* Determine the mode to reload in.
7150 See comments above (for input reloading). */
7152 mode = GET_MODE (old);
7153 if (mode == VOIDmode)
7155 /* VOIDmode should never happen for an output. */
7156 if (asm_noperands (PATTERN (insn)) < 0)
7157 /* It's the compiler's fault. */
7158 fatal_insn ("VOIDmode on an output", insn);
7159 error_for_asm (insn, "output operand is constant in `asm'");
7160 /* Prevent crash--use something we know is valid. */
7161 mode = word_mode;
7162 old = gen_rtx_REG (mode, REGNO (reloadreg));
7165 if (GET_MODE (reloadreg) != mode)
7166 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7168 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7170 /* If we need two reload regs, set RELOADREG to the intermediate
7171 one, since it will be stored into OLD. We might need a secondary
7172 register only for an input reload, so check again here. */
7174 if (reload_secondary_out_reload[j] >= 0)
7176 rtx real_old = old;
7178 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7179 && reg_equiv_mem[REGNO (old)] != 0)
7180 real_old = reg_equiv_mem[REGNO (old)];
7182 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
7183 mode, real_old)
7184 != NO_REGS))
7186 second_reloadreg = reloadreg;
7187 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
7189 /* See if RELOADREG is to be used as a scratch register
7190 or as an intermediate register. */
7191 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
7193 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
7194 (real_old, second_reloadreg, reloadreg)));
7195 special = 1;
7197 else
7199 /* See if we need both a scratch and intermediate reload
7200 register. */
7202 int secondary_reload = reload_secondary_out_reload[j];
7203 enum insn_code tertiary_icode
7204 = reload_secondary_out_icode[secondary_reload];
7206 if (GET_MODE (reloadreg) != mode)
7207 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7209 if (tertiary_icode != CODE_FOR_nothing)
7211 rtx third_reloadreg
7212 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
7213 rtx tem;
7215 /* Copy the primary reload reg to the secondary reload reg
7216 (note that these have been swapped above), then copy the
7217 secondary reload reg to OLD using our insn. */
7219 /* If REAL_OLD is a paradoxical SUBREG, remove it
7220 and try to put the opposite SUBREG on
7221 RELOADREG. */
7222 if (GET_CODE (real_old) == SUBREG
7223 && (GET_MODE_SIZE (GET_MODE (real_old))
7224 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7225 && 0 != (tem = gen_lowpart_common
7226 (GET_MODE (SUBREG_REG (real_old)),
7227 reloadreg)))
7228 real_old = SUBREG_REG (real_old), reloadreg = tem;
7230 gen_reload (reloadreg, second_reloadreg,
7231 reload_opnum[j], reload_when_needed[j]);
7232 emit_insn ((GEN_FCN (tertiary_icode)
7233 (real_old, reloadreg, third_reloadreg)));
7234 special = 1;
7237 else
7238 /* Copy between the reload regs here and then to
7239 OUT later. */
7241 gen_reload (reloadreg, second_reloadreg,
7242 reload_opnum[j], reload_when_needed[j]);
7246 #endif
7248 /* Output the last reload insn. */
7249 if (! special)
7251 rtx set;
7253 /* Don't output the last reload if OLD is not the dest of
7254 INSN and is in the src and is clobbered by INSN. */
7255 if (! flag_expensive_optimizations
7256 || GET_CODE (old) != REG
7257 || !(set = single_set (insn))
7258 || rtx_equal_p (old, SET_DEST (set))
7259 || !reg_mentioned_p (old, SET_SRC (set))
7260 || !regno_clobbered_p (REGNO (old), insn))
7261 gen_reload (old, reloadreg, reload_opnum[j],
7262 reload_when_needed[j]);
7265 #ifdef PRESERVE_DEATH_INFO_REGNO_P
7266 /* If final will look at death notes for this reg,
7267 put one on the last output-reload insn to use it. Similarly
7268 for any secondary register. */
7269 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
7270 for (p = get_last_insn (); p; p = PREV_INSN (p))
7271 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7272 && reg_overlap_mentioned_for_reload_p (reloadreg,
7273 PATTERN (p)))
7274 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
7275 reloadreg, REG_NOTES (p));
7277 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7278 if (! special && second_reloadreg
7279 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
7280 for (p = get_last_insn (); p; p = PREV_INSN (p))
7281 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7282 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
7283 PATTERN (p)))
7284 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
7285 second_reloadreg,
7286 REG_NOTES (p));
7287 #endif
7288 #endif
7289 /* Look at all insns we emitted, just to be safe. */
7290 for (p = get_insns (); p; p = NEXT_INSN (p))
7291 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7293 rtx pat = PATTERN (p);
7295 /* If this output reload doesn't come from a spill reg,
7296 clear any memory of reloaded copies of the pseudo reg.
7297 If this output reload comes from a spill reg,
7298 reg_has_output_reload will make this do nothing. */
7299 note_stores (pat, forget_old_reloads_1);
7301 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7303 if (reload_spill_index[j] < 0
7304 && GET_CODE (pat) == SET
7305 && SET_SRC (pat) == reload_reg_rtx[j])
7307 int src = REGNO (SET_SRC (pat));
7309 reload_spill_index[j] = src;
7310 SET_HARD_REG_BIT (reg_is_output_reload, src);
7311 if (find_regno_note (insn, REG_DEAD, src))
7312 SET_HARD_REG_BIT (reg_reloaded_died, src);
7314 if (reload_spill_index[j] >= 0)
7316 int s = reload_secondary_out_reload[j];
7317 rtx set = single_set (p);
7318 /* If this reload copies only to the secondary reload
7319 register, the secondary reload does the actual
7320 store. */
7321 if (s >= 0 && set == NULL_RTX)
7322 ; /* We can't tell what function the secondary reload
7323 has and where the actual store to the pseudo is
7324 made; leave new_spill_reg_store alone. */
7325 else if (s >= 0
7326 && SET_SRC (set) == reload_reg_rtx[j]
7327 && SET_DEST (set) == reload_reg_rtx[s])
7329 /* Usually the next instruction will be the
7330 secondary reload insn; if we can confirm
7331 that it is, setting new_spill_reg_store to
7332 that insn will allow an extra optimization. */
7333 rtx s_reg = reload_reg_rtx[s];
7334 rtx next = NEXT_INSN (p);
7335 reload_out[s] = reload_out[j];
7336 set = single_set (next);
7337 if (set && SET_SRC (set) == s_reg
7338 && ! new_spill_reg_store[REGNO (s_reg)])
7339 new_spill_reg_store[REGNO (s_reg)] = next;
7341 else
7342 new_spill_reg_store[reload_spill_index[j]] = p;
7347 if (reload_when_needed[j] == RELOAD_OTHER)
7349 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7350 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7352 else
7353 output_reload_insns[reload_opnum[j]] = get_insns ();
7355 end_sequence ();
7359 /* Now write all the insns we made for reloads in the order expected by
7360 the allocation functions. Prior to the insn being reloaded, we write
7361 the following reloads:
7363 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7365 RELOAD_OTHER reloads.
7367 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7368 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7369 RELOAD_FOR_INPUT reload for the operand.
7371 RELOAD_FOR_OPADDR_ADDRS reloads.
7373 RELOAD_FOR_OPERAND_ADDRESS reloads.
7375 After the insn being reloaded, we write the following:
7377 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7378 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7379 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7380 reloads for the operand. The RELOAD_OTHER output reloads are
7381 output in descending order by reload number. */
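/* For a (hypothetical) insn I with one reloaded input operand 0 and one
   reloaded output operand 1, the emit_insns_before calls below therefore
   produce an insn stream of the shape:

	RELOAD_FOR_OTHER_ADDRESS reloads
	RELOAD_OTHER input reloads
	inpaddr-address, input-address, then input reloads for operand 0
	RELOAD_FOR_OPADDR_ADDR reloads
	RELOAD_FOR_OPERAND_ADDRESS reloads
	I, the insn being reloaded
	outaddr-address, output-address, then output reloads for operand 1
	RELOAD_OTHER output reloads for operand 1  */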
7383 emit_insns_before (other_input_address_reload_insns, insn);
7384 emit_insns_before (other_input_reload_insns, insn);
7386 for (j = 0; j < reload_n_operands; j++)
7388 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7389 emit_insns_before (input_address_reload_insns[j], insn);
7390 emit_insns_before (input_reload_insns[j], insn);
7393 emit_insns_before (other_operand_reload_insns, insn);
7394 emit_insns_before (operand_reload_insns, insn);
7396 for (j = 0; j < reload_n_operands; j++)
7398 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7399 emit_insns_before (output_address_reload_insns[j], following_insn);
7400 emit_insns_before (output_reload_insns[j], following_insn);
7401 emit_insns_before (other_output_reload_insns[j], following_insn);
7404 /* Keep basic block info up to date. */
7405 if (n_basic_blocks)
7407 if (basic_block_head[chain->block] == insn)
7408 basic_block_head[chain->block] = NEXT_INSN (before_insn);
7409 if (basic_block_end[chain->block] == insn)
7410 basic_block_end[chain->block] = PREV_INSN (following_insn);
7413 /* Move death notes from INSN
7414 to output-operand-address and output reload insns. */
7415 #ifdef PRESERVE_DEATH_INFO_REGNO_P
7417 rtx insn1;
7418 /* Loop over those insns, last ones first. */
7419 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
7420 insn1 = PREV_INSN (insn1))
7421 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
7423 rtx source = SET_SRC (PATTERN (insn1));
7424 rtx dest = SET_DEST (PATTERN (insn1));
7426 /* The note we will examine next. */
7427 rtx reg_notes = REG_NOTES (insn);
7428 /* The place that pointed to this note. */
7429 rtx *prev_reg_note = &REG_NOTES (insn);
7431 /* If the note is for something used in the source of this
7432 reload insn, or in the output address, move the note. */
7433 while (reg_notes)
7435 rtx next_reg_notes = XEXP (reg_notes, 1);
7436 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
7437 && GET_CODE (XEXP (reg_notes, 0)) == REG
7438 && ((GET_CODE (dest) != REG
7439 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7440 dest))
7441 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7442 source)))
7444 *prev_reg_note = next_reg_notes;
7445 XEXP (reg_notes, 1) = REG_NOTES (insn1);
7446 REG_NOTES (insn1) = reg_notes;
7448 else
7449 prev_reg_note = &XEXP (reg_notes, 1);
7451 reg_notes = next_reg_notes;
7455 #endif
7457 /* For all the spill regs newly reloaded in this instruction,
7458 record what they were reloaded from, so subsequent instructions
7459 can inherit the reloads.
7461 Update spill_reg_store for the reloads of this insn.
7462 Copy the elements that were updated in the loop above. */
7464 for (j = 0; j < n_reloads; j++)
7466 register int r = reload_order[j];
7467 register int i = reload_spill_index[r];
7469 /* I is nonneg if this reload used a register.
7470 If reload_reg_rtx[r] is 0, this is an optional reload
7471 that we opted to ignore. */
7473 if (i >= 0 && reload_reg_rtx[r] != 0)
7475 int nr
7476 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
7477 int k;
7478 int part_reaches_end = 0;
7479 int all_reaches_end = 1;
7481 /* For a multi register reload, we need to check if all or part
7482 of the value lives to the end. */
7483 for (k = 0; k < nr; k++)
7485 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
7486 reload_when_needed[r]))
7487 part_reaches_end = 1;
7488 else
7489 all_reaches_end = 0;
7492 /* Ignore reloads that don't reach the end of the insn in their
7493 entirety. */
7494 if (all_reaches_end)
7496 /* First, clear out memory of what used to be in this spill reg.
7497 If consecutive registers are used, clear them all. */
7499 for (k = 0; k < nr; k++)
7500 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7502 /* Maybe the spill reg contains a copy of reload_out. */
7503 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7505 register int nregno = REGNO (reload_out[r]);
7506 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7507 : HARD_REGNO_NREGS (nregno,
7508 GET_MODE (reload_reg_rtx[r])));
7510 spill_reg_store[i] = new_spill_reg_store[i];
7511 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7513 /* If NREGNO is a hard register, it may occupy more than
7514 one register. If it does, say what is in the
7515 rest of the registers assuming that both registers
7516 agree on how many words the object takes. If not,
7517 invalidate the subsequent registers. */
7519 if (nregno < FIRST_PSEUDO_REGISTER)
7520 for (k = 1; k < nnr; k++)
7521 reg_last_reload_reg[nregno + k]
7522 = (nr == nnr
7523 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7524 REGNO (reload_reg_rtx[r]) + k)
7525 : 0);
7527 /* Now do the inverse operation. */
7528 for (k = 0; k < nr; k++)
7530 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7531 reg_reloaded_contents[i + k]
7532 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7533 ? nregno
7534 : nregno + k);
7535 reg_reloaded_insn[i + k] = insn;
7536 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7540 /* Maybe the spill reg contains a copy of reload_in. Only do
7541 something if there will not be an output reload for
7542 the register being reloaded. */
7543 else if (reload_out[r] == 0
7544 && reload_in[r] != 0
7545 && spill_reg_order[i] >= 0
7546 && ((GET_CODE (reload_in[r]) == REG
7547 && ! reg_has_output_reload[REGNO (reload_in[r])])
7548 || (GET_CODE (reload_in_reg[r]) == REG
7549 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
7551 register int nregno;
7552 int nnr;
7554 if (GET_CODE (reload_in[r]) == REG)
7555 nregno = REGNO (reload_in[r]);
7556 else
7557 nregno = REGNO (reload_in_reg[r]);
7559 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7560 : HARD_REGNO_NREGS (nregno,
7561 GET_MODE (reload_reg_rtx[r])));
7563 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7565 if (nregno < FIRST_PSEUDO_REGISTER)
7566 for (k = 1; k < nnr; k++)
7567 reg_last_reload_reg[nregno + k]
7568 = (nr == nnr
7569 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7570 REGNO (reload_reg_rtx[r]) + k)
7571 : 0);
7573 /* Unless we inherited this reload, show we haven't
7574 recently done a store. */
7575 if (! reload_inherited[r])
7576 spill_reg_store[i] = 0;
7578 for (k = 0; k < nr; k++)
7580 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7581 reg_reloaded_contents[i + k]
7582 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7583 ? nregno
7584 : nregno + k);
7585 reg_reloaded_insn[i + k] = insn;
7586 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7591 /* However, if part of the reload reaches the end, then we must
7592 invalidate the old info for the part that survives to the end. */
7593 else if (part_reaches_end)
7595 for (k = 0; k < nr; k++)
7596 if (reload_reg_reaches_end_p (i + k,
7597 reload_opnum[r],
7598 reload_when_needed[r]))
7599 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7603 /* The following if-statement was #if 0'd in 1.34 (or before...).
7604 It's reenabled in 1.35 because supposedly nothing else
7605 deals with this problem. */
7607 /* If a register gets output-reloaded from a non-spill register,
7608 that invalidates any previous reloaded copy of it.
7609 But forget_old_reloads_1 won't get to see it, because
7610 it thinks only about the original insn. So invalidate it here. */
7611 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7613 register int nregno = REGNO (reload_out[r]);
7614 if (nregno >= FIRST_PSEUDO_REGISTER)
7615 reg_last_reload_reg[nregno] = 0;
7616 else
7618 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7620 while (num_regs-- > 0)
7621 reg_last_reload_reg[nregno + num_regs] = 0;
7625 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7628 /* Emit code to perform a reload from IN (which may be a reload register) to
7629 OUT (which may also be a reload register). IN or OUT is from operand
7630 OPNUM with reload type TYPE.
7632 Returns first insn emitted. */
7634 static rtx
7635 gen_reload (out, in, opnum, type)
7636 rtx out;
7637 rtx in;
7638 int opnum;
7639 enum reload_type type;
7641 rtx last = get_last_insn ();
7642 rtx tem;
7644 /* If IN is a paradoxical SUBREG, remove it and try to put the
7645 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7646 if (GET_CODE (in) == SUBREG
7647 && (GET_MODE_SIZE (GET_MODE (in))
7648 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7649 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7650 in = SUBREG_REG (in), out = tem;
7651 else if (GET_CODE (out) == SUBREG
7652 && (GET_MODE_SIZE (GET_MODE (out))
7653 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7654 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7655 out = SUBREG_REG (out), in = tem;
7657 /* How to do this reload can get quite tricky. Normally, we are being
7658 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7659 register that didn't get a hard register. In that case we can just
7660 call emit_move_insn.
7662 We can also be asked to reload a PLUS that adds a register or a MEM to
7663 another register, constant or MEM. This can occur during frame pointer
7664 elimination and while reloading addresses. This case is handled by
7665 trying to emit a single insn to perform the add. If it is not valid,
7666 we use a two insn sequence.
7668 Finally, we could be called to handle an 'o' constraint by putting
7669 an address into a register. In that case, we first try to do this
7670 with a named pattern of "reload_load_address". If no such pattern
7671 exists, we just emit a SET insn and hope for the best (it will normally
7672 be valid on machines that use 'o').
7674 This entire process is made complex because reload will never
7675 process the insns we generate here and so we must ensure that
7676 they will fit their constraints and also by the fact that parts of
7677 IN might be being reloaded separately and replaced with spill registers.
7678 Because of this, we are, in some sense, just guessing the right approach
7679 here. The one listed above seems to work.
7681 ??? At some point, this whole thing needs to be rethought. */
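/* A worked example of the PLUS case (register numbers are only
   illustrative): asked to reload IN = (plus:SI (reg:SI 14) (const_int 64))
   into OUT = (reg:SI 3), we first emit
	(set (reg:SI 3) (plus:SI (reg:SI 14) (const_int 64)))
   and keep it if recog_memoized and constrain_operands accept it.  If
   not, we fall back to the two-insn sequence
	(set (reg:SI 3) (const_int 64))
	(set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 14)))
   produced by the recursive gen_reload call plus gen_add2_insn below.  */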
7683 if (GET_CODE (in) == PLUS
7684 && (GET_CODE (XEXP (in, 0)) == REG
7685 || GET_CODE (XEXP (in, 0)) == SUBREG
7686 || GET_CODE (XEXP (in, 0)) == MEM)
7687 && (GET_CODE (XEXP (in, 1)) == REG
7688 || GET_CODE (XEXP (in, 1)) == SUBREG
7689 || CONSTANT_P (XEXP (in, 1))
7690 || GET_CODE (XEXP (in, 1)) == MEM))
7692 /* We need to compute the sum of a register or a MEM and another
7693 register, constant, or MEM, and put it into the reload
7694 register. The best possible way of doing this is if the machine
7695 has a three-operand ADD insn that accepts the required operands.
7697 The simplest approach is to try to generate such an insn and see if it
7698 is recognized and matches its constraints. If so, it can be used.
7700 It might be better not to actually emit the insn unless it is valid,
7701 but we need to pass the insn as an operand to `recog' and
7702 `insn_extract' and it is simpler to emit and then delete the insn if
7703 not valid than to dummy things up. */
7705 rtx op0, op1, tem, insn;
7706 int code;
7708 op0 = find_replacement (&XEXP (in, 0));
7709 op1 = find_replacement (&XEXP (in, 1));
7711 /* Since constraint checking is strict, commutativity won't be
7712 checked, so we need to do that here to avoid spurious failure
7713 if the add instruction is two-address and the second operand
7714 of the add is the same as the reload reg, which is frequently
7715 the case. If the insn would be A = B + A, rearrange it so
7716 it will be A = A + B as constrain_operands expects. */
7718 if (GET_CODE (XEXP (in, 1)) == REG
7719 && REGNO (out) == REGNO (XEXP (in, 1)))
7720 tem = op0, op0 = op1, op1 = tem;
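/* E.g. with OUT = (reg:SI 3) and IN = (plus:SI (reg:SI 7) (reg:SI 3)),
   the operands are swapped here so the SET we try below is
	(set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 7)))
   which is the A = A + B shape that constrain_operands expects for a
   two-address add.  (Register numbers are only illustrative.)  */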
7722 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7723 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7725 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7726 code = recog_memoized (insn);
7728 if (code >= 0)
7730 insn_extract (insn);
7731 /* We want constrain operands to treat this insn strictly in
7732 its validity determination, i.e., the way it would after reload
7733 has completed. */
7734 if (constrain_operands (code, 1))
7735 return insn;
7738 delete_insns_since (last);
7740 /* If that failed, we must use a conservative two-insn sequence.
7741 Use a move to copy the constant, MEM, or pseudo register to the reload
7742 register since "move" will be able to handle an arbitrary operand,
7743 unlike add which can't, in general. Then add the registers.
7745 If there is another way to do this for a specific machine, a
7746 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7747 we emit below. */
7749 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7750 || (GET_CODE (op1) == REG
7751 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7752 tem = op0, op0 = op1, op1 = tem;
7754 gen_reload (out, op0, opnum, type);
7756 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7757 This fixes a problem on the 32K where the stack pointer cannot
7758 be used as an operand of an add insn. */
7760 if (rtx_equal_p (op0, op1))
7761 op1 = out;
7763 insn = emit_insn (gen_add2_insn (out, op1));
7765 /* If that failed, copy the address register to the reload register.
7766 Then add the constant to the reload register. */
7768 code = recog_memoized (insn);
7770 if (code >= 0)
7772 insn_extract (insn);
7773 /* We want constrain operands to treat this insn strictly in
7774 its validity determination, i.e., the way it would after reload
7775 has completed. */
7776 if (constrain_operands (code, 1))
7778 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7779 REG_NOTES (insn)
7780 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7781 return insn;
7785 delete_insns_since (last);
7787 gen_reload (out, op1, opnum, type);
7788 insn = emit_insn (gen_add2_insn (out, op0));
7789 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7792 #ifdef SECONDARY_MEMORY_NEEDED
7793 /* If we need a memory location to do the move, do it that way. */
7794 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7795 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7796 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7797 REGNO_REG_CLASS (REGNO (out)),
7798 GET_MODE (out)))
7800 /* Get the memory to use and rewrite both registers to its mode. */
7801 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7803 if (GET_MODE (loc) != GET_MODE (out))
7804 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7806 if (GET_MODE (loc) != GET_MODE (in))
7807 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7809 gen_reload (loc, in, opnum, type);
7810 gen_reload (out, loc, opnum, type);
7812 #endif
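/* A minimal sketch of what this case means in practice (it is entirely
   target-dependent, so the details here are only illustrative): on a
   machine where two register classes cannot be copied directly, moving
   IN to OUT becomes two recursive reloads through the stack slot that
   get_secondary_mem returns, i.e. a store of IN into that MEM followed
   by a load of OUT from it.  */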
7814 /* If IN is a simple operand, use gen_move_insn. */
7815 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7816 emit_insn (gen_move_insn (out, in));
7818 #ifdef HAVE_reload_load_address
7819 else if (HAVE_reload_load_address)
7820 emit_insn (gen_reload_load_address (out, in));
7821 #endif
7823 /* Otherwise, just write (set OUT IN) and hope for the best. */
7824 else
7825 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7827 /* Return the first insn emitted.
7828 We can not just return get_last_insn, because there may have
7829 been multiple instructions emitted. Also note that gen_move_insn may
7830 emit more than one insn itself, so we can not assume that there is one
7831 insn emitted per emit_insn_before call. */
7833 return last ? NEXT_INSN (last) : get_insns ();
7836 /* Delete a previously made output-reload
7837 whose result we now believe is not needed.
7838 First we double-check.
7840 INSN is the insn now being processed.
7841 OUTPUT_RELOAD_INSN is the insn of the output reload.
7842 J is the reload-number for this insn. */
7844 static void
7845 delete_output_reload (insn, j, output_reload_insn)
7846 rtx insn;
7847 int j;
7848 rtx output_reload_insn;
7850 register rtx i1;
7852 /* Get the raw pseudo-register referred to. */
7854 rtx reg = reload_in[j];
7855 while (GET_CODE (reg) == SUBREG)
7856 reg = SUBREG_REG (reg);
7858 /* If the pseudo-reg we are reloading is no longer referenced
7859 anywhere between the store into it and here,
7860 and no jumps or labels intervene, then the value can get
7861 here through the reload reg alone.
7862 Otherwise, give up--return. */
7863 for (i1 = NEXT_INSN (output_reload_insn);
7864 i1 != insn; i1 = NEXT_INSN (i1))
7866 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7867 return;
7868 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7869 && reg_mentioned_p (reg, PATTERN (i1)))
7871 /* If this is just a single USE with a REG_EQUAL note in front
7872 of INSN, this is no problem, because this mentions just the
7873 address that we are using here.
7874 But if there is more than one such USE, the insn might use
7875 the operand directly, or another reload might do that.
7876 This is analogous to the count_occurrences check in the callers. */
7877 int num_occurences = 0;
7879 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE
7880 && find_reg_note (i1, REG_EQUAL, NULL_RTX))
7882 num_occurences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7883 i1 = NEXT_INSN (i1);
7885 if (num_occurences == 1 && i1 == insn)
7886 break;
7887 return;
7891 /* The caller has already checked that REG dies or is set in INSN.
7892 It has also checked that we are optimizing, and thus some inaccuracies
7893 in the debugging information are acceptable.
7894 So we could just delete output_reload_insn.
7895 But in some cases we can improve the debugging information without
7896 sacrificing optimization - maybe even improving the code:
7897 See if the pseudo reg has been completely replaced
7898 with reload regs. If so, delete the store insn
7899 and forget we had a stack slot for the pseudo. */
7900 if (reload_out[j] != reload_in[j]
7901 && REG_N_DEATHS (REGNO (reg)) == 1
7902 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7903 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7905 rtx i2;
7907 /* We know that it was used only between here
7908 and the beginning of the current basic block.
7909 (We also know that the last use before INSN was
7910 the output reload we are thinking of deleting, but never mind that.)
7911 Search that range; see if any ref remains. */
7912 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7914 rtx set = single_set (i2);
7916 /* Uses which just store in the pseudo don't count,
7917 since if they are the only uses, they are dead. */
7918 if (set != 0 && SET_DEST (set) == reg)
7919 continue;
7920 if (GET_CODE (i2) == CODE_LABEL
7921 || GET_CODE (i2) == JUMP_INSN)
7922 break;
7923 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7924 && reg_mentioned_p (reg, PATTERN (i2)))
7926 /* Some other ref remains; just delete the output reload we
7927 know to be dead. */
7928 delete_insn (output_reload_insn);
7929 return;
7933 /* Delete the now-dead stores into this pseudo. */
7934 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7936 rtx set = single_set (i2);
7938 if (set != 0 && SET_DEST (set) == reg)
7940 /* This might be a basic block head,
7941 thus don't use delete_insn. */
7942 PUT_CODE (i2, NOTE);
7943 NOTE_SOURCE_FILE (i2) = 0;
7944 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7946 if (GET_CODE (i2) == CODE_LABEL
7947 || GET_CODE (i2) == JUMP_INSN)
7948 break;
7951 /* For the debugging info,
7952 say the pseudo lives in this reload reg. */
7953 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7954 alter_reg (REGNO (reg), -1);
7956 delete_insn (output_reload_insn);
7960 /* Output reload-insns to reload VALUE into RELOADREG.
7961 VALUE is an autoincrement or autodecrement RTX whose operand
7962 is a register or memory location;
7963 so reloading involves incrementing that location.
7965 INC_AMOUNT is the number to increment or decrement by (always positive).
7966 This cannot be deduced from VALUE. */
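/* For instance (illustrative register numbers only), reloading
   VALUE = (post_inc:SI (reg:SI 5)) with INC_AMOUNT 4 into
   RELOADREG = (reg:SI 3) ideally becomes
	(set (reg:SI 3) (reg:SI 5))
	(set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 4)))
   while a pre_dec of the same location becomes the decrement of
   (reg:SI 5) followed by the copy into (reg:SI 3).  The fallback paths
   below do the same job through RELOADREG when the add on INCLOC is not
   directly recognized.  */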
7968 static void
7969 inc_for_reload (reloadreg, value, inc_amount)
7970 rtx reloadreg;
7971 rtx value;
7972 int inc_amount;
7974 /* REG or MEM to be copied and incremented. */
7975 rtx incloc = XEXP (value, 0);
7976 /* Nonzero if increment after copying. */
7977 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7978 rtx last;
7979 rtx inc;
7980 rtx add_insn;
7981 int code;
7983 /* No hard register is equivalent to this register after
7984 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7985 we could inc/dec that register as well (maybe even using it for
7986 the source), but I'm not sure it's worth worrying about. */
7987 if (GET_CODE (incloc) == REG)
7988 reg_last_reload_reg[REGNO (incloc)] = 0;
7990 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7991 inc_amount = - inc_amount;
7993 inc = GEN_INT (inc_amount);
7995 /* If this is post-increment, first copy the location to the reload reg. */
7996 if (post)
7997 emit_insn (gen_move_insn (reloadreg, incloc));
7999 /* See if we can directly increment INCLOC. Use a method similar to that
8000 in gen_reload. */
8002 last = get_last_insn ();
8003 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8004 gen_rtx_PLUS (GET_MODE (incloc),
8005 incloc, inc)));
8007 code = recog_memoized (add_insn);
8008 if (code >= 0)
8010 insn_extract (add_insn);
8011 if (constrain_operands (code, 1))
8013 /* If this is a pre-increment and we have incremented the value
8014 where it lives, copy the incremented value to RELOADREG to
8015 be used as an address. */
8017 if (! post)
8018 emit_insn (gen_move_insn (reloadreg, incloc));
8020 return;
8024 delete_insns_since (last);
8026 /* If we couldn't do the increment directly, we must increment in RELOADREG.
8027 The way we do this depends on whether this is pre- or post-increment.
8028 For pre-increment, copy INCLOC to the reload register, increment it
8029 there, then save back. */
8031 if (! post)
8033 emit_insn (gen_move_insn (reloadreg, incloc));
8034 emit_insn (gen_add2_insn (reloadreg, inc));
8035 emit_insn (gen_move_insn (incloc, reloadreg));
8037 else
8039 /* Postincrement.
8040 Because this might be a jump insn or a compare, and because RELOADREG
8041 may not be available after the insn in an input reload, we must do
8042 the incrementation before the insn being reloaded for.
8044 We have already copied INCLOC to RELOADREG. Increment the copy in
8045 RELOADREG, save that back, then decrement RELOADREG so it has
8046 the original value. */
8048 emit_insn (gen_add2_insn (reloadreg, inc));
8049 emit_insn (gen_move_insn (incloc, reloadreg));
8050 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
8053 return;
8056 /* Return 1 if we are certain that the constraint-string STRING allows
8057 the hard register REG. Return 0 if we can't be sure of this. */
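/* Illustrative cases, assuming REG is an ordinary general-purpose hard
   register: the string "r" (or "g") makes this return 1, while "r,m"
   returns 0, because the second alternative contains no letter whose
   class includes REG, and every alternative must accept the register
   for us to be certain.  */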
8059 static int
8060 constraint_accepts_reg_p (string, reg)
8061 char *string;
8062 rtx reg;
8064 int value = 0;
8065 int regno = true_regnum (reg);
8066 int c;
8068 /* Initialize for first alternative. */
8069 value = 0;
8070 /* Check that each alternative contains `g' or `r'. */
8071 while (1)
8072 switch (c = *string++)
8074 case 0:
8075 /* If an alternative lacks `g' or `r', we lose. */
8076 return value;
8077 case ',':
8078 /* If an alternative lacks `g' or `r', we lose. */
8079 if (value == 0)
8080 return 0;
8081 /* Initialize for next alternative. */
8082 value = 0;
8083 break;
8084 case 'g':
8085 case 'r':
8086 /* Any general reg wins for this alternative. */
8087 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8088 value = 1;
8089 break;
8090 default:
8091 /* Any reg in specified class wins for this alternative. */
8093 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8095 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8096 value = 1;
8101 /* Return the number of places FIND appears within X, but don't count
8102 an occurrence if some SET_DEST is FIND. */
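/* For example (register numbers are only illustrative), if FIND is the
   shared rtx for pseudo 99 and X is
	(set (reg:SI 99) (plus:SI (reg:SI 99) (reg:SI 100)))
   the SET_DEST occurrence is ignored and only the use inside the source
   is counted, so the result is 1.  Note that the comparison is pointer
   equality, which is why callers pass the very rtx that appears in the
   insn.  */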
8105 count_occurrences (x, find)
8106 register rtx x, find;
8108 register int i, j;
8109 register enum rtx_code code;
8110 register char *format_ptr;
8111 int count;
8113 if (x == find)
8114 return 1;
8115 if (x == 0)
8116 return 0;
8118 code = GET_CODE (x);
8120 switch (code)
8122 case REG:
8123 case QUEUED:
8124 case CONST_INT:
8125 case CONST_DOUBLE:
8126 case SYMBOL_REF:
8127 case CODE_LABEL:
8128 case PC:
8129 case CC0:
8130 return 0;
8132 case SET:
8133 if (SET_DEST (x) == find)
8134 return count_occurrences (SET_SRC (x), find);
8135 break;
8137 default:
8138 break;
8141 format_ptr = GET_RTX_FORMAT (code);
8142 count = 0;
8144 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8146 switch (*format_ptr++)
8148 case 'e':
8149 count += count_occurrences (XEXP (x, i), find);
8150 break;
8152 case 'E':
8153 if (XVEC (x, i) != NULL)
8155 for (j = 0; j < XVECLEN (x, i); j++)
8156 count += count_occurrences (XVECEXP (x, i, j), find);
8158 break;
8161 return count;
8164 /* This array holds values which are equivalent to a hard register
8165 during reload_cse_regs. Each array element is an EXPR_LIST of
8166 values. Each time a hard register is set, we set the corresponding
8167 array element to the value. Each time a hard register is copied
8168 into memory, we add the memory location to the corresponding array
8169 element. We don't store values or memory addresses with side
8170 effects in this array.
8172 If the value is a CONST_INT, then the mode of the containing
8173 EXPR_LIST is the mode in which that CONST_INT was referenced.
8175 We sometimes clobber a specific entry in a list. In that case, we
8176 just set XEXP (list-entry, 0) to 0. */
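/* As a (hypothetical) illustration: after processing
	(set (reg:SI 3) (mem:SI (reg:SI 14)))
   reg_values[3] records that MEM as a known value of hard register 3,
   so a later insn that loads the same MEM into another hard register
   can be turned into a register-register copy.  Any store that might
   alias the MEM, or any assignment to register 3 or register 14, drops
   the entry again via the invalidation routines below.  */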
8178 static rtx *reg_values;
8180 /* This is a preallocated REG rtx which we use as a temporary in
8181 reload_cse_invalidate_regno, so that we don't need to allocate a
8182 new one each time through a loop in that function. */
8184 static rtx invalidate_regno_rtx;
8186 /* This is a set of registers for which we must remove REG_DEAD notes in
8187 previous insns, because our modifications made them invalid. That can
8188 happen if we introduced the register into the current insn, or we deleted
8189 the current insn which used to set the register. */
8191 static HARD_REG_SET no_longer_dead_regs;
8193 /* Invalidate any entries in reg_values which depend on REGNO,
8194 including those for REGNO itself. This is called if REGNO is
8195 changing. If CLOBBER is true, then always forget anything we
8196 currently know about REGNO. MODE is the mode of the assignment to
8197 REGNO, which is used to determine how many hard registers are being
8198 changed. If MODE is VOIDmode, then only REGNO is being changed;
8199 this is used when invalidating call clobbered registers across a
8200 call. */
8202 static void
8203 reload_cse_invalidate_regno (regno, mode, clobber)
8204 int regno;
8205 enum machine_mode mode;
8206 int clobber;
8208 int endregno;
8209 register int i;
8211 /* Our callers don't always go through true_regnum; we may see a
8212 pseudo-register here from a CLOBBER or the like. We probably
8213 won't ever see a pseudo-register that has a real register number,
8214 but we check anyhow for safety. */
8215 if (regno >= FIRST_PSEUDO_REGISTER)
8216 regno = reg_renumber[regno];
8217 if (regno < 0)
8218 return;
8220 if (mode == VOIDmode)
8221 endregno = regno + 1;
8222 else
8223 endregno = regno + HARD_REGNO_NREGS (regno, mode);
8225 if (clobber)
8226 for (i = regno; i < endregno; i++)
8227 reg_values[i] = 0;
8229 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8231 rtx x;
8233 for (x = reg_values[i]; x; x = XEXP (x, 1))
8235 if (XEXP (x, 0) != 0
8236 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
8238 /* If this is the only entry on the list, clear
8239 reg_values[i]. Otherwise, just clear this entry on
8240 the list. */
8241 if (XEXP (x, 1) == 0 && x == reg_values[i])
8243 reg_values[i] = 0;
8244 break;
8246 XEXP (x, 0) = 0;
8251 /* We must look at earlier registers, in case REGNO is part of a
8252 multi word value but is not the first register. If an earlier
8253 register has a value in a mode which overlaps REGNO, then we must
8254 invalidate that earlier register. Note that we do not need to
8255 check REGNO or later registers (we must not check REGNO itself,
8256 because we would incorrectly conclude that there was a conflict). */
8258 for (i = 0; i < regno; i++)
8260 rtx x;
8262 for (x = reg_values[i]; x; x = XEXP (x, 1))
8264 if (XEXP (x, 0) != 0)
8266 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
8267 REGNO (invalidate_regno_rtx) = i;
8268 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8269 NULL_PTR))
8271 reload_cse_invalidate_regno (i, VOIDmode, 1);
8272 break;
8279 /* The memory at address MEM_BASE is being changed.
8280 Return whether this change will invalidate VAL. */
8282 static int
8283 reload_cse_mem_conflict_p (mem_base, val)
8284 rtx mem_base;
8285 rtx val;
8287 enum rtx_code code;
8288 char *fmt;
8289 int i;
8291 code = GET_CODE (val);
8292 switch (code)
8294 /* Get rid of a few simple cases quickly. */
8295 case REG:
8296 case PC:
8297 case CC0:
8298 case SCRATCH:
8299 case CONST:
8300 case CONST_INT:
8301 case CONST_DOUBLE:
8302 case SYMBOL_REF:
8303 case LABEL_REF:
8304 return 0;
8306 case MEM:
8307 if (GET_MODE (mem_base) == BLKmode
8308 || GET_MODE (val) == BLKmode)
8309 return 1;
8310 if (anti_dependence (val, mem_base))
8311 return 1;
8312 /* The address may contain nested MEMs. */
8313 break;
8315 default:
8316 break;
8319 fmt = GET_RTX_FORMAT (code);
8321 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8323 if (fmt[i] == 'e')
8325 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
8326 return 1;
8328 else if (fmt[i] == 'E')
8330 int j;
8332 for (j = 0; j < XVECLEN (val, i); j++)
8333 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
8334 return 1;
8338 return 0;
8341 /* Invalidate any entries in reg_values which are changed because of a
8342 store to MEM_RTX. If this is called because of a non-const call
8343 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8345 static void
8346 reload_cse_invalidate_mem (mem_rtx)
8347 rtx mem_rtx;
8349 register int i;
8351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8353 rtx x;
8355 for (x = reg_values[i]; x; x = XEXP (x, 1))
8357 if (XEXP (x, 0) != 0
8358 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
8360 /* If this is the only entry on the list, clear
8361 reg_values[i]. Otherwise, just clear this entry on
8362 the list. */
8363 if (XEXP (x, 1) == 0 && x == reg_values[i])
8365 reg_values[i] = 0;
8366 break;
8368 XEXP (x, 0) = 0;
8374 /* Invalidate DEST, which is being assigned to or clobbered. The
8375 second parameter exists so that this function can be passed to
8376 note_stores; it is ignored. */
8378 static void
8379 reload_cse_invalidate_rtx (dest, ignore)
8380 rtx dest;
8381 rtx ignore ATTRIBUTE_UNUSED;
8383 while (GET_CODE (dest) == STRICT_LOW_PART
8384 || GET_CODE (dest) == SIGN_EXTRACT
8385 || GET_CODE (dest) == ZERO_EXTRACT
8386 || GET_CODE (dest) == SUBREG)
8387 dest = XEXP (dest, 0);
8389 if (GET_CODE (dest) == REG)
8390 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8391 else if (GET_CODE (dest) == MEM)
8392 reload_cse_invalidate_mem (dest);
8395 /* Possibly delete death notes on the insns before INSN if modifying INSN
8396 extended the lifespan of the registers. */
8398 static void
8399 reload_cse_delete_death_notes (insn)
8400 rtx insn;
8402 int dreg;
8404 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
8406 rtx trial;
8408 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
8409 continue;
8411 for (trial = prev_nonnote_insn (insn);
8412 (trial
8413 && GET_CODE (trial) != CODE_LABEL
8414 && GET_CODE (trial) != BARRIER);
8415 trial = prev_nonnote_insn (trial))
8417 if (find_regno_note (trial, REG_DEAD, dreg))
8419 remove_death (dreg, trial);
8420 break;
8426 /* Record that the current insn uses hard reg REGNO in mode MODE. This
8427 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
8428 notes for this register. */
8430 static void
8431 reload_cse_no_longer_dead (regno, mode)
8432 int regno;
8433 enum machine_mode mode;
8435 int nregs = HARD_REGNO_NREGS (regno, mode);
8436 while (nregs-- > 0)
8438 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
8439 regno++;
8444 /* Do a very simple CSE pass over the hard registers.
8446 This function detects no-op moves where we happened to assign two
8447 different pseudo-registers to the same hard register, and then
8448 copied one to the other. Reload will generate a useless
8449 instruction copying a register to itself.
8451 This function also detects cases where we load a value from memory
8452 into two different registers, and (if memory is more expensive than
8453 registers) changes it to simply copy the first register into the
8454 second register.
8456 Another optimization scans the operands of each instruction to see
8457 whether an operand's value is already available in some hard register.
8458 If so, it replaces the operand with that hard register, much like an
8459 optional reload would. */
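/* As a concrete illustration (hypothetical insns, not from any target):
   if pseudos 100 and 101 both ended up in hard reg 3, reload may leave
       (set (reg:SI 3) (reg:SI 3))
   behind, which the first optimization deletes.  Likewise, given
       (set (reg:SI 3) (mem:SI (reg:SI 6)))
       ...
       (set (reg:SI 4) (mem:SI (reg:SI 6)))
   the second load can become (set (reg:SI 4) (reg:SI 3)) when the
   register copy is cheaper than the memory reference.  */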
8461 static void
8462 reload_cse_regs_1 (first)
8463 rtx first;
8465 char *firstobj;
8466 rtx callmem;
8467 register int i;
8468 rtx insn;
8470 init_alias_analysis ();
8472 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8473 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
8475 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8476 free them when we are done. */
8477 push_obstacks (&reload_obstack, &reload_obstack);
8478 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8480 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8481 memory for a non-const call instruction. */
8482 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
8484 /* This is used in reload_cse_invalidate_regno to avoid consing a
8485 new REG in a loop in that function. */
8486 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
8488 for (insn = first; insn; insn = NEXT_INSN (insn))
8490 rtx body;
8492 if (GET_CODE (insn) == CODE_LABEL)
8494 /* Forget all the register values at a code label. We don't
8495 try to do anything clever around jumps. */
8496 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8497 reg_values[i] = 0;
8499 continue;
8502 #ifdef NON_SAVING_SETJMP
8503 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8504 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8506 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8507 reg_values[i] = 0;
8509 continue;
8511 #endif
8513 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8514 continue;
8516 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8518 /* If this is a call instruction, forget anything stored in a
8519 call clobbered register, or, if this is not a const call, in
8520 memory. */
8521 if (GET_CODE (insn) == CALL_INSN)
8523 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8524 if (call_used_regs[i])
8525 reload_cse_invalidate_regno (i, VOIDmode, 1);
8527 if (! CONST_CALL_P (insn))
8528 reload_cse_invalidate_mem (callmem);
8531 body = PATTERN (insn);
8532 if (GET_CODE (body) == SET)
8534 int count = 0;
8535 if (reload_cse_noop_set_p (body, insn))
8537 /* If this sets the return value of the function, we must keep
8538 a USE around, in case this is in a different basic block
8539 than the final USE. Otherwise, we could lose important
8540 register liveness information on SMALL_REGISTER_CLASSES
8541 machines, where return registers might be used as spills:
8542 subsequent passes assume that spill registers are dead at
8543 the end of a basic block. */
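/* Concretely (the register number is made up): a no-op move such as
       (set (reg:SI 0) (reg:SI 0))
   whose destination carries REG_FUNCTION_VALUE_P is rewritten as
       (use (reg:SI 0))
   instead of being turned into a deleted-insn note.  */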
8544 if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
8546 pop_obstacks ();
8547 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
8548 INSN_CODE (insn) = -1;
8549 REG_NOTES (insn) = NULL_RTX;
8550 push_obstacks (&reload_obstack, &reload_obstack);
8552 else
8554 PUT_CODE (insn, NOTE);
8555 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8556 NOTE_SOURCE_FILE (insn) = 0;
8558 reload_cse_delete_death_notes (insn);
8560 /* We're done with this insn. */
8561 continue;
8564 /* It's not a no-op, but we can try to simplify it. */
8565 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8566 count += reload_cse_simplify_set (body, insn);
8568 if (count > 0 && apply_change_group ())
8569 reload_cse_delete_death_notes (insn);
8570 else if (reload_cse_simplify_operands (insn))
8571 reload_cse_delete_death_notes (insn);
8573 reload_cse_record_set (body, body);
8575 else if (GET_CODE (body) == PARALLEL)
8577 int count = 0;
8578 rtx value = NULL_RTX;
8580 /* If every action in a PARALLEL is a noop, we can delete
8581 the entire PARALLEL. */
8582 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8584 rtx part = XVECEXP (body, 0, i);
8585 if (GET_CODE (part) == SET)
8587 if (! reload_cse_noop_set_p (part, insn))
8588 break;
8589 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8591 if (value)
8592 break;
8593 value = SET_DEST (part);
8596 else if (GET_CODE (part) != CLOBBER)
8597 break;
8599 if (i < 0)
8601 if (value)
8603 pop_obstacks ();
8604 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8605 INSN_CODE (insn) = -1;
8606 REG_NOTES (insn) = NULL_RTX;
8607 push_obstacks (&reload_obstack, &reload_obstack);
8609 else
8611 PUT_CODE (insn, NOTE);
8612 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8613 NOTE_SOURCE_FILE (insn) = 0;
8615 reload_cse_delete_death_notes (insn);
8617 /* We're done with this insn. */
8618 continue;
8621 /* It's not a no-op, but we can try to simplify it. */
8622 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8623 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8624 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8625 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8627 if (count > 0 && apply_change_group ())
8628 reload_cse_delete_death_notes (insn);
8629 else if (reload_cse_simplify_operands (insn))
8630 reload_cse_delete_death_notes (insn);
8632 /* Look through the PARALLEL and record the values being
8633 set, if possible. Also handle any CLOBBERs. */
8634 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8636 rtx x = XVECEXP (body, 0, i);
8638 if (GET_CODE (x) == SET)
8639 reload_cse_record_set (x, body);
8640 else
8641 note_stores (x, reload_cse_invalidate_rtx);
8644 else
8645 note_stores (body, reload_cse_invalidate_rtx);
8647 #ifdef AUTO_INC_DEC
8648 /* Clobber any registers which appear in REG_INC notes. We
8649 could keep track of the changes to their values, but it is
8650 unlikely to help. */
8652 rtx x;
8654 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8655 if (REG_NOTE_KIND (x) == REG_INC)
8656 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8658 #endif
8660 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8661 after we have processed the insn. */
8662 if (GET_CODE (insn) == CALL_INSN)
8664 rtx x;
8666 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8667 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8668 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8672 /* Free all the temporary structures we created, and go back to the
8673 regular obstacks. */
8674 obstack_free (&reload_obstack, firstobj);
8675 pop_obstacks ();
8678 /* Perform the cse- and combine-like post-reload optimization passes.
8679 FIRST is the first instruction. */
8680 void
8681 reload_cse_regs (first)
8682 rtx first;
8684 reload_cse_regs_1 (first);
8685 reload_combine ();
8686 reload_cse_move2add (first);
8687 if (flag_expensive_optimizations)
8688 reload_cse_regs_1 (first);
8691 /* Return whether the values known for REGNO are equal to VAL. MODE
8692 is the mode of the object that VAL is being copied to; this matters
8693 if VAL is a CONST_INT. */
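/* The mode matters because a CONST_INT carries no mode of its own: if hard
   reg 3 (register number made up) is recorded as holding (const_int -1) in
   SImode, that only tells us about its QImode lowpart when truncation from
   SImode to QImode is a no-op, which is what the mode-size and
   TRULY_NOOP_TRUNCATION tests below verify.  */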
8695 static int
8696 reload_cse_regno_equal_p (regno, val, mode)
8697 int regno;
8698 rtx val;
8699 enum machine_mode mode;
8701 rtx x;
8703 if (val == 0)
8704 return 0;
8706 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8707 if (XEXP (x, 0) != 0
8708 && rtx_equal_p (XEXP (x, 0), val)
8709 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8710 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
8711 && (GET_CODE (val) != CONST_INT
8712 || mode == GET_MODE (x)
8713 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8714 /* On a big endian machine if the value spans more than
8715 one register then this register holds the high part of
8716 it and we can't use it.
8718 ??? We should also compare with the high part of the
8719 value. */
8720 && !(WORDS_BIG_ENDIAN
8721 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8722 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8723 GET_MODE_BITSIZE (GET_MODE (x))))))
8724 return 1;
8726 return 0;
8729 /* See whether a single set is a noop. SET is the set instruction we
8730 should check, and INSN is the instruction from which it came. */
8732 static int
8733 reload_cse_noop_set_p (set, insn)
8734 rtx set;
8735 rtx insn;
8737 rtx src, dest;
8738 enum machine_mode dest_mode;
8739 int dreg, sreg;
8740 int ret;
8742 src = SET_SRC (set);
8743 dest = SET_DEST (set);
8744 dest_mode = GET_MODE (dest);
8746 if (side_effects_p (src))
8747 return 0;
8749 dreg = true_regnum (dest);
8750 sreg = true_regnum (src);
8752 /* Check for setting a register to itself. In this case, we don't
8753 have to worry about REG_DEAD notes. */
8754 if (dreg >= 0 && dreg == sreg)
8755 return 1;
8757 ret = 0;
8758 if (dreg >= 0)
8760 /* Check for setting a register to itself. */
8761 if (dreg == sreg)
8762 ret = 1;
8764 /* Check for setting a register to a value which we already know
8765 is in the register. */
8766 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8767 ret = 1;
8769 /* Check for setting a register DREG to another register SREG
8770 where SREG is equal to a value which is already in DREG. */
8771 else if (sreg >= 0)
8773 rtx x;
8775 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8777 rtx tmp;
8779 if (XEXP (x, 0) == 0)
8780 continue;
8782 if (dest_mode == GET_MODE (x))
8783 tmp = XEXP (x, 0);
8784 else if (GET_MODE_BITSIZE (dest_mode)
8785 < GET_MODE_BITSIZE (GET_MODE (x)))
8786 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8787 else
8788 continue;
8790 if (tmp
8791 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8793 ret = 1;
8794 break;
8799 else if (GET_CODE (dest) == MEM)
8801 /* Check for storing a register to memory when we know that the
8802 register is equivalent to the memory location. */
8803 if (sreg >= 0
8804 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8805 && ! side_effects_p (dest))
8806 ret = 1;
8809 /* If we can delete this SET, then we need to look for an earlier
8810 REG_DEAD note on DREG, and remove it if it exists. */
8811 if (ret && dreg >= 0)
8813 if (! find_regno_note (insn, REG_UNUSED, dreg))
8814 reload_cse_no_longer_dead (dreg, dest_mode);
8817 return ret;
8820 /* Try to simplify a single SET instruction. SET is the set pattern.
8821 INSN is the instruction it came from.
8822 This function only handles one case: if we set a register to a value
8823 which is not a register, we try to find that value in some other register
8824 and change the set into a register copy. */
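/* For example (hypothetical insn, register numbers made up): if hard reg 5
   is known to contain (const_int 42), then
       (set (reg:SI 3) (const_int 42))
   can be changed into
       (set (reg:SI 3) (reg:SI 5))
   provided the register-register move is no more expensive than loading
   the constant (see the cost checks below).  */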
8826 static int
8827 reload_cse_simplify_set (set, insn)
8828 rtx set;
8829 rtx insn;
8831 int dreg;
8832 rtx src;
8833 enum machine_mode dest_mode;
8834 enum reg_class dclass;
8835 register int i;
8837 dreg = true_regnum (SET_DEST (set));
8838 if (dreg < 0)
8839 return 0;
8841 src = SET_SRC (set);
8842 if (side_effects_p (src) || true_regnum (src) >= 0)
8843 return 0;
8845 dclass = REGNO_REG_CLASS (dreg);
8847 /* If memory loads are cheaper than register copies, don't change them. */
8848 if (GET_CODE (src) == MEM
8849 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
8850 return 0;
8852 /* If the constant is cheaper than a register, don't change it. */
8853 if (CONSTANT_P (src)
8854 && rtx_cost (src, SET) < 2)
8855 return 0;
8857 dest_mode = GET_MODE (SET_DEST (set));
8858 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8860 if (i != dreg
8861 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8862 && reload_cse_regno_equal_p (i, src, dest_mode))
8864 int validated;
8866 /* Pop back to the real obstacks while changing the insn. */
8867 pop_obstacks ();
8869 validated = validate_change (insn, &SET_SRC (set),
8870 gen_rtx_REG (dest_mode, i), 1);
8872 /* Go back to the obstack we are using for temporary
8873 storage. */
8874 push_obstacks (&reload_obstack, &reload_obstack);
8876 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8878 reload_cse_no_longer_dead (i, dest_mode);
8879 return 1;
8883 return 0;
8886 /* Try to replace operands in INSN with equivalent values that are already
8887 in registers. This can be viewed as optional reloading.
8889 For each non-register operand in the insn, see if any hard regs are
8890 known to be equivalent to that operand. Record the alternatives which
8891 can accept these hard registers. Among all alternatives, select the
8892 ones which are better or equal to the one currently matching, where
8893 "better" is in terms of '?' and '!' constraints. Among the remaining
8894 alternatives, select the one which replaces most operands with
8895 hard registers. */
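/* A sketch of the idea (the constraints are made up, not from any machine
   description): suppose operand 1 of an insn has the constraints "r,m" and
   currently matches the second ("m") alternative.  If some hard reg is
   known to hold the same value as the memory operand, recording that reg
   for the first ("r") alternative lets us pick that alternative instead,
   as long as its reject count ('?' adds 3, '!' adds 300) is no worse than
   the current alternative's.  */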
8897 static int
8898 reload_cse_simplify_operands (insn)
8899 rtx insn;
8901 #ifdef REGISTER_CONSTRAINTS
8902 int insn_code_number, n_operands, n_alternatives;
8903 int i,j;
8905 char *constraints[MAX_RECOG_OPERANDS];
8907 /* Vector recording how bad an alternative is. */
8908 int *alternative_reject;
8909 /* Vector recording how many registers can be introduced by choosing
8910 this alternative. */
8911 int *alternative_nregs;
8912 /* Array of vectors recording, for each operand and each alternative,
8913 which hard register to substitute, or -1 if the operand should be
8914 left as it is. */
8915 int *op_alt_regno[MAX_RECOG_OPERANDS];
8916 /* Array of alternatives, sorted in order of decreasing desirability. */
8917 int *alternative_order;
8918 rtx reg = gen_rtx_REG (VOIDmode, -1);
8920 /* Find out some information about this insn. */
8921 insn_code_number = recog_memoized (insn);
8922 /* We don't modify asm instructions. */
8923 if (insn_code_number < 0)
8924 return 0;
8926 n_operands = insn_n_operands[insn_code_number];
8927 n_alternatives = insn_n_alternatives[insn_code_number];
8929 if (n_alternatives == 0 || n_operands == 0)
8930 return 0;
8931 insn_extract (insn);
8933 /* Figure out which alternative currently matches. */
8934 if (! constrain_operands (insn_code_number, 1))
8935 fatal_insn_not_found (insn);
8937 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8938 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8939 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8940 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8941 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8943 for (i = 0; i < n_operands; i++)
8945 enum machine_mode mode;
8946 int regno;
8947 char *p;
8949 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8950 for (j = 0; j < n_alternatives; j++)
8951 op_alt_regno[i][j] = -1;
8953 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8954 mode = insn_operand_mode[insn_code_number][i];
8956 /* Add the reject values for each alternative given by the constraints
8957 for this operand. */
8958 j = 0;
8959 while (*p != '\0')
8961 char c = *p++;
8962 if (c == ',')
8963 j++;
8964 else if (c == '?')
8965 alternative_reject[j] += 3;
8966 else if (c == '!')
8967 alternative_reject[j] += 300;
8970 /* We won't change operands which are already registers. We
8971 also don't want to modify output operands. */
8972 regno = true_regnum (recog_operand[i]);
8973 if (regno >= 0
8974 || constraints[i][0] == '='
8975 || constraints[i][0] == '+')
8976 continue;
8978 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8980 int class = (int) NO_REGS;
8982 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8983 continue;
8985 REGNO (reg) = regno;
8986 PUT_MODE (reg, mode);
8988 /* We found a register equal to this operand. Now look for all
8989 alternatives that can accept this register and have not been
8990 assigned a register they can use yet. */
8991 j = 0;
8992 p = constraints[i];
8993 for (;;)
8995 char c = *p++;
8997 switch (c)
8999 case '=': case '+': case '?':
9000 case '#': case '&': case '!':
9001 case '*': case '%':
9002 case '0': case '1': case '2': case '3': case '4':
9003 case 'm': case '<': case '>': case 'V': case 'o':
9004 case 'E': case 'F': case 'G': case 'H':
9005 case 's': case 'i': case 'n':
9006 case 'I': case 'J': case 'K': case 'L':
9007 case 'M': case 'N': case 'O': case 'P':
9008 #ifdef EXTRA_CONSTRAINT
9009 case 'Q': case 'R': case 'S': case 'T': case 'U':
9010 #endif
9011 case 'p': case 'X':
9012 /* These don't say anything we care about. */
9013 break;
9015 case 'g': case 'r':
9016 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9017 break;
9019 default:
9020 class
9021 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
9022 break;
9024 case ',': case '\0':
9025 /* See if REGNO fits this alternative, and set it up as the
9026 replacement register if we don't have one for this
9027 alternative yet and the operand being replaced is not
9028 a cheap CONST_INT. */
9029 if (op_alt_regno[i][j] == -1
9030 && reg_fits_class_p (reg, class, 0, mode)
9031 && (GET_CODE (recog_operand[i]) != CONST_INT
9032 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
9034 alternative_nregs[j]++;
9035 op_alt_regno[i][j] = regno;
9037 j++;
9038 break;
9041 if (c == '\0')
9042 break;
9047 /* Record all alternatives which are better or equal to the currently
9048 matching one in the alternative_order array. */
9049 for (i = j = 0; i < n_alternatives; i++)
9050 if (alternative_reject[i] <= alternative_reject[which_alternative])
9051 alternative_order[j++] = i;
9052 n_alternatives = j;
9054 /* Sort it. Given a small number of alternatives, a dumb algorithm
9055 won't hurt too much. */
9056 for (i = 0; i < n_alternatives - 1; i++)
9058 int best = i;
9059 int best_reject = alternative_reject[alternative_order[i]];
9060 int best_nregs = alternative_nregs[alternative_order[i]];
9061 int tmp;
9063 for (j = i + 1; j < n_alternatives; j++)
9065 int this_reject = alternative_reject[alternative_order[j]];
9066 int this_nregs = alternative_nregs[alternative_order[j]];
9068 if (this_reject < best_reject
9069 || (this_reject == best_reject && this_nregs < best_nregs))
9071 best = j;
9072 best_reject = this_reject;
9073 best_nregs = this_nregs;
9077 tmp = alternative_order[best];
9078 alternative_order[best] = alternative_order[i];
9079 alternative_order[i] = tmp;
9082 /* Substitute the operands as determined by op_alt_regno for the best
9083 alternative. */
9084 j = alternative_order[0];
9085 CLEAR_HARD_REG_SET (no_longer_dead_regs);
9087 /* Pop back to the real obstacks while changing the insn. */
9088 pop_obstacks ();
9090 for (i = 0; i < n_operands; i++)
9092 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
9093 if (op_alt_regno[i][j] == -1)
9094 continue;
9096 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
9097 validate_change (insn, recog_operand_loc[i],
9098 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
9101 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
9103 int op = recog_dup_num[i];
9104 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
9106 if (op_alt_regno[op][j] == -1)
9107 continue;
9109 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
9110 validate_change (insn, recog_dup_loc[i],
9111 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
9114 /* Go back to the obstack we are using for temporary
9115 storage. */
9116 push_obstacks (&reload_obstack, &reload_obstack);
9118 return apply_change_group ();
9119 #else
9120 return 0;
9121 #endif
9124 /* These two variables are used to pass information from
9125 reload_cse_record_set to reload_cse_check_clobber. */
9127 static int reload_cse_check_clobbered;
9128 static rtx reload_cse_check_src;
9130 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9131 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9132 second argument, which is passed by note_stores, is ignored. */
9134 static void
9135 reload_cse_check_clobber (dest, ignore)
9136 rtx dest;
9137 rtx ignore ATTRIBUTE_UNUSED;
9139 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9140 reload_cse_check_clobbered = 1;
9143 /* Record the result of a SET instruction. SET is the set pattern.
9144 BODY is the pattern of the insn that it came from. */
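/* For example (register numbers made up): after
       (set (reg:SI 3) (mem:SI (reg:SI 6)))
   reg_values[3] records the MEM, so a later identical load into another
   register can be turned into a copy from reg 3, and a later store of
   reg 3 back to the same address can be deleted as a no-op.  */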
9146 static void
9147 reload_cse_record_set (set, body)
9148 rtx set;
9149 rtx body;
9151 rtx dest, src, x;
9152 int dreg, sreg;
9153 enum machine_mode dest_mode;
9155 dest = SET_DEST (set);
9156 src = SET_SRC (set);
9157 dreg = true_regnum (dest);
9158 sreg = true_regnum (src);
9159 dest_mode = GET_MODE (dest);
9161 /* Some machines don't define AUTO_INC_DEC, but they still use push
9162 instructions. We need to catch that case here in order to
9163 invalidate the stack pointer correctly. Note that invalidating
9164 the stack pointer is different from invalidating DEST. */
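/* E.g. (set (mem:SI (pre_dec:SI (reg:SI 7))) (reg:SI 0)) -- a push on a
   machine where reg 7 is the stack pointer (made-up example) -- modifies
   reg 7 even though the SET destination is the MEM, so the stack pointer
   must be invalidated separately.  */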
9165 x = dest;
9166 while (GET_CODE (x) == SUBREG
9167 || GET_CODE (x) == ZERO_EXTRACT
9168 || GET_CODE (x) == SIGN_EXTRACT
9169 || GET_CODE (x) == STRICT_LOW_PART)
9170 x = XEXP (x, 0);
9171 if (push_operand (x, GET_MODE (x)))
9173 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
9174 reload_cse_invalidate_rtx (dest, NULL_RTX);
9175 return;
9178 /* We can only handle an assignment to a register, or a store of a
9179 register to a memory location. For other cases, we just clobber
9180 the destination. We also have to just clobber if there are side
9181 effects in SRC or DEST. */
9182 if ((dreg < 0 && GET_CODE (dest) != MEM)
9183 || side_effects_p (src)
9184 || side_effects_p (dest))
9186 reload_cse_invalidate_rtx (dest, NULL_RTX);
9187 return;
9190 #ifdef HAVE_cc0
9191 /* We don't try to handle values involving CC, because it's a pain
9192 to keep track of when they have to be invalidated. */
9193 if (reg_mentioned_p (cc0_rtx, src)
9194 || reg_mentioned_p (cc0_rtx, dest))
9196 reload_cse_invalidate_rtx (dest, NULL_RTX);
9197 return;
9199 #endif
9201 /* If BODY is a PARALLEL, then we need to see whether the source of
9202 SET is clobbered by some other instruction in the PARALLEL. */
9203 if (GET_CODE (body) == PARALLEL)
9205 int i;
9207 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
9209 rtx x;
9211 x = XVECEXP (body, 0, i);
9212 if (x == set)
9213 continue;
9215 reload_cse_check_clobbered = 0;
9216 reload_cse_check_src = src;
9217 note_stores (x, reload_cse_check_clobber);
9218 if (reload_cse_check_clobbered)
9220 reload_cse_invalidate_rtx (dest, NULL_RTX);
9221 return;
9226 if (dreg >= 0)
9228 int i;
9230 /* This is an assignment to a register. Update the value we
9231 have stored for the register. */
9232 if (sreg >= 0)
9234 rtx x;
9236 /* This is a copy from one register to another. Any values
9237 which were valid for SREG are now valid for DREG. If the
9238 mode changes, we use gen_lowpart_common to extract only
9239 the part of the value that is copied. */
9240 reg_values[dreg] = 0;
9241 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9243 rtx tmp;
9245 if (XEXP (x, 0) == 0)
9246 continue;
9247 if (dest_mode == GET_MODE (XEXP (x, 0)))
9248 tmp = XEXP (x, 0);
9249 else if (GET_MODE_BITSIZE (dest_mode)
9250 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
9251 continue;
9252 else
9253 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9254 if (tmp)
9255 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
9256 reg_values[dreg]);
9259 else
9260 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
9262 /* We've changed DREG, so invalidate any values held by other
9263 registers that depend upon it. */
9264 reload_cse_invalidate_regno (dreg, dest_mode, 0);
9266 /* If this assignment changes more than one hard register,
9267 forget anything we know about the others. */
9268 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
9269 reg_values[dreg + i] = 0;
9271 else if (GET_CODE (dest) == MEM)
9273 /* Invalidate conflicting memory locations. */
9274 reload_cse_invalidate_mem (dest);
9276 /* If we're storing a register to memory, add DEST to the list
9277 in REG_VALUES. */
9278 if (sreg >= 0 && ! side_effects_p (dest))
9279 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
9280 reg_values[sreg]);
9282 else
9284 /* We should have bailed out earlier. */
9285 abort ();
9289 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9290 addressing now.
9291 This code might also be useful when reload gave up on reg+reg addressing
9292 because of clashes between the return register and INDEX_REG_CLASS. */
9294 /* The maximum number of uses of a register we can keep track of to
9295 replace them with reg+reg addressing. */
9296 #define RELOAD_COMBINE_MAX_USES 6
9298 /* INSN is the insn where a register has been used, and USEP points to the
9299 location of the register within the rtl. */
9300 struct reg_use { rtx insn, *usep; };
9302 /* If the register is used in some unknown fashion, USE_INDEX is negative.
9303 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9304 indicates where it becomes live again.
9305 Otherwise, USE_INDEX is the index of the most recently recorded use of the
9306 register (since we scan backwards, this is the earliest of these uses in
9307 insn order), OFFSET contains the constant offset that is added to the
9308 register in all recorded uses, and USE_RUID identifies the use that was
9309 encountered first by the scan, i.e. the last one in insn order. */
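/* For instance (a made-up insn stream, scanned from the last insn
   backwards):
       insn 30: ... (plus (reg 4) (const_int 8)) ...
       insn 40: ... (plus (reg 4) (const_int 8)) ...
   leaves use_index == RELOAD_COMBINE_MAX_USES - 2 for reg 4, OFFSET ==
   (const_int 8), USE_RUID == the ruid of insn 40 (the use seen first by
   the backward scan), and two reg_use slots pointing at the two
   occurrences.  A use with a different offset, or any use after use_index
   has dropped below zero, marks the register as used in an unknown
   fashion (use_index == -1).  */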
9310 static struct
9312 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
9313 int use_index;
9314 rtx offset;
9315 int store_ruid;
9316 int use_ruid;
9317 } reg_state[FIRST_PSEUDO_REGISTER];
9319 /* Reverse linear uid. This is increased in reload_combine while scanning
9320 the instructions from last to first. It is used to set last_label_ruid
9321 and the store_ruid / use_ruid fields in reg_state. */
9322 static int reload_combine_ruid;
9324 static void
9325 reload_combine ()
9327 rtx insn, set;
9328 int first_index_reg = 1, last_index_reg = 0;
9329 int i;
9330 int last_label_ruid;
9332 /* If reg+reg can be used in offsettable memory addresses, the main chunk of
9333 reload has already used it where appropriate, so there is no use in
9334 trying to generate it now. */
9335 if (double_reg_address_ok && reload_address_index_reg_class != NO_REGS)
9336 return;
9338 /* To avoid wasting too much time later searching for an index register,
9339 determine the minimum and maximum index register numbers. */
9340 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9342 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
9344 if (! last_index_reg)
9345 last_index_reg = i;
9346 first_index_reg = i;
9349 /* If no index register is available, we can quit now. */
9350 if (first_index_reg > last_index_reg)
9351 return;
9353 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9354 last_label_ruid = reload_combine_ruid = 0;
9355 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9357 if (fixed_regs[i])
9358 reg_state[i].use_index = -1;
9359 else
9361 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9362 reg_state[i].store_ruid = reload_combine_ruid;
9366 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
9368 rtx note;
9370 /* We cannot do our optimization across labels. Invalidating all the use
9371 information we have would be costly, so we just note where the label
9372 is and then later disable any optimization that would cross it. */
9373 if (GET_CODE (insn) == CODE_LABEL)
9374 last_label_ruid = reload_combine_ruid;
9375 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9376 continue;
9377 reload_combine_ruid++;
9379 /* Look for (set (REGX) (CONST_INT))
9380 (set (REGX) (PLUS (REGX) (REGY)))
9382 ... (MEM (REGX)) ...
9383 and convert it to
9384 (set (REGZ) (CONST_INT))
9386 ... (MEM (PLUS (REGZ) (REGY)))... .
9388 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9389 and that we know all uses of REGX before it dies. */
9390 set = single_set (insn);
9391 if (set != NULL_RTX
9392 && GET_CODE (SET_DEST (set)) == REG
9393 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
9394 GET_MODE (SET_DEST (set)))
9395 == 1)
9396 && GET_CODE (SET_SRC (set)) == PLUS
9397 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
9398 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
9399 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
9401 rtx reg = SET_DEST (set);
9402 rtx plus = SET_SRC (set);
9403 rtx base = XEXP (plus, 1);
9404 rtx prev = prev_nonnote_insn (insn);
9405 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
9406 int regno = REGNO (reg);
9407 rtx const_reg;
9408 rtx reg_sum = NULL_RTX;
9410 /* Now, we need an index register.
9411 We'll set index_reg to this index register, const_reg to the
9412 register that is to be loaded with the constant
9413 (denoted as REGZ in the substitution illustration above),
9414 and reg_sum to the reg+reg sum that we want to use to
9415 substitute for uses of REG (typically in MEMs).
9416 First check REG and BASE for being index registers;
9417 we can use them even if they are not dead. */
9418 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
9419 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
9420 REGNO (base)))
9422 const_reg = reg;
9423 reg_sum = plus;
9425 else
9427 /* Otherwise, look for a free index register. Since we have
9428 checked above that neither REG nor BASE is an index register,
9429 if we find anything at all, it will be different from these
9430 two registers. */
9431 for (i = first_index_reg; i <= last_index_reg; i++)
9433 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
9434 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
9435 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
9436 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
9438 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
9439 const_reg = index_reg;
9440 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
9441 break;
9445 if (prev_set
9446 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
9447 && rtx_equal_p (SET_DEST (prev_set), reg)
9448 && reg_state[regno].use_index >= 0
9449 && reg_sum)
9451 int i;
9453 /* Change destination register and - if necessary - the
9454 constant value in PREV, the constant loading instruction. */
9455 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
9456 if (reg_state[regno].offset != const0_rtx)
9457 validate_change (prev,
9458 &SET_SRC (prev_set),
9459 GEN_INT (INTVAL (SET_SRC (prev_set))
9460 + INTVAL (reg_state[regno].offset)),
9461 1);
9462 /* Now for every use of REG that we have recorded, replace REG
9463 with REG_SUM. */
9464 for (i = reg_state[regno].use_index;
9465 i < RELOAD_COMBINE_MAX_USES; i++)
9466 validate_change (reg_state[regno].reg_use[i].insn,
9467 reg_state[regno].reg_use[i].usep,
9468 reg_sum, 1);
9470 if (apply_change_group ())
9472 rtx *np;
9474 /* Delete the reg-reg addition. */
9475 PUT_CODE (insn, NOTE);
9476 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9477 NOTE_SOURCE_FILE (insn) = 0;
9479 if (reg_state[regno].offset != const0_rtx)
9481 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9482 are now invalid. */
9483 for (np = &REG_NOTES (prev); *np; )
9485 if (REG_NOTE_KIND (*np) == REG_EQUAL
9486 || REG_NOTE_KIND (*np) == REG_EQUIV)
9487 *np = XEXP (*np, 1);
9488 else
9489 np = &XEXP (*np, 1);
9492 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9493 reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
9494 continue;
9498 note_stores (PATTERN (insn), reload_combine_note_store);
9499 if (GET_CODE (insn) == CALL_INSN)
9501 rtx link;
9503 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9505 if (call_used_regs[i])
9507 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9508 reg_state[i].store_ruid = reload_combine_ruid;
9511 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
9512 link = XEXP (link, 1))
9514 rtx use = XEXP (link, 0);
9515 int regno = REGNO (XEXP (use, 0));
9516 if (GET_CODE (use) == CLOBBER)
9518 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9519 reg_state[regno].store_ruid = reload_combine_ruid;
9521 else
9522 reg_state[regno].use_index = -1;
9525 if (GET_CODE (insn) == JUMP_INSN)
9527 /* Non-spill registers might be used at the jump destination in
9528 some unknown fashion, so we have to mark the unknown use. */
9529 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9531 if (! TEST_HARD_REG_BIT (used_spill_regs, i))
9532 reg_state[i].use_index = -1;
9535 reload_combine_note_use (&PATTERN (insn), insn);
9536 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9538 if (REG_NOTE_KIND (note) == REG_INC
9539 && GET_CODE (XEXP (note, 0)) == REG)
9540 reg_state[REGNO (XEXP (note, 0))].use_index = -1;
9545 /* Check if DST is a register or a subreg of a register; if it is,
9546 update reg_state[regno].store_ruid and reg_state[regno].use_index
9547 accordingly. Called via note_stores from reload_combine.
9548 The second argument, SET, is ignored. */
9549 static void
9550 reload_combine_note_store (dst, set)
9551 rtx dst, set ATTRIBUTE_UNUSED;
9553 int regno = 0;
9554 int i;
9555 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9557 if (GET_CODE (dst) == SUBREG)
9559 regno = SUBREG_WORD (dst);
9560 dst = SUBREG_REG (dst);
9562 if (GET_CODE (dst) != REG)
9563 return;
9564 regno += REGNO (dst);
9565 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9566 careful with registers / register parts that are not full words. */
9567 if (size < (unsigned) UNITS_PER_WORD)
9568 reg_state[regno].use_index = -1;
9569 else
9571 for (i = size / UNITS_PER_WORD - 1 + regno; i >= regno; i--)
9573 reg_state[i].store_ruid = reload_combine_ruid;
9574 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9579 /* XP points to a piece of rtl that has to be checked for any uses of
9580 registers.
9581 *XP is the pattern of INSN, or a part of it.
9582 Called from reload_combine, and recursively by itself. */
9583 static void
9584 reload_combine_note_use (xp, insn)
9585 rtx *xp, insn;
9587 rtx x = *xp;
9588 enum rtx_code code = x->code;
9589 char *fmt;
9590 int i, j;
9591 rtx offset = const0_rtx; /* For the REG case below. */
9593 switch (code)
9595 case SET:
9596 if (GET_CODE (SET_DEST (x)) == REG)
9598 reload_combine_note_use (&SET_SRC (x), insn);
9599 return;
9601 break;
9603 case CLOBBER:
9604 if (GET_CODE (SET_DEST (x)) == REG)
9605 return;
9606 break;
9608 case PLUS:
9609 /* We are interested in (plus (reg) (const_int)) . */
9610 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9611 break;
9612 offset = XEXP (x, 1);
9613 x = XEXP (x, 0);
9614 /* Fall through. */
9615 case REG:
9617 int regno = REGNO (x);
9618 int use_index;
9620 /* Some spurious USEs of pseudo registers might remain.
9621 Just ignore them. */
9622 if (regno >= FIRST_PSEUDO_REGISTER)
9623 return;
9625 /* If this register is already used in some unknown fashion, we
9626 can't do anything.
9627 If we decrement the index from zero to -1, we can't store more
9628 uses, so this register becomes used in an unknown fashion. */
9629 use_index = --reg_state[regno].use_index;
9630 if (use_index < 0)
9631 return;
9633 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9635 /* We have found another use for a register that is already
9636 used later. Check if the offsets match; if not, mark the
9637 register as used in an unknown fashion. */
9638 if (! rtx_equal_p (offset, reg_state[regno].offset))
9640 reg_state[regno].use_index = -1;
9641 return;
9644 else
9646 /* This is the first use of this register we have seen since we
9647 marked it as dead. */
9648 reg_state[regno].offset = offset;
9649 reg_state[regno].use_ruid = reload_combine_ruid;
9651 reg_state[regno].reg_use[use_index].insn = insn;
9652 reg_state[regno].reg_use[use_index].usep = xp;
9653 return;
9656 default:
9657 break;
9660 /* Recursively process the components of X. */
9661 fmt = GET_RTX_FORMAT (code);
9662 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9664 if (fmt[i] == 'e')
9665 reload_combine_note_use (&XEXP (x, i), insn);
9666 else if (fmt[i] == 'E')
9668 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9669 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9674 /* See if we can reduce the cost of a constant by replacing a move with
9675 an add. */
9676 /* We cannot do our optimization across labels. Invalidating all the
9677 information about register contents we have would be costly, so we
9678 use last_label_luid (local variable of reload_cse_move2add) to note
9679 where the label is and then later disable any optimization that would
9680 cross it.
9681 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9682 reg_set_luid[n] is larger than last_label_luid. */
9683 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9684 /* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9685 reg_mode[n] to be valid.
9686 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9687 has been set to reg_offset[n] in mode reg_mode[n] .
9688 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9689 register n has been set to the sum of reg_offset[n] and register
9690 reg_base_reg[n], calculated in mode reg_mode[n] . */
9691 static rtx reg_offset[FIRST_PSEUDO_REGISTER];
9692 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9693 static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
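/* Illustration of the encoding (register numbers are made up): after
       (set (reg:SI 3) (const_int 100))
   we have reg_base_reg[3] < 0, reg_offset[3] == (const_int 100) and
   reg_mode[3] == SImode, i.e. reg 3 holds the constant 100.  After
       (set (reg:SI 5) (plus (reg:SI 7) (const_int 8)))
   we have reg_base_reg[5] == 7 and reg_offset[5] == (const_int 8), i.e.
   reg 5 holds reg 7 + 8, computed in SImode.  */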
9694 /* move2add_luid is linearly increased while scanning the instructions
9695 from first to last. It is used to set reg_set_luid in
9696 reload_cse_move2add and move2add_note_store, and to set reg_death_luid
9697 (local variable of reload_cse_move2add) . */
9698 static int move2add_luid;
9700 static void
9701 reload_cse_move2add (first)
9702 rtx first;
9704 int i;
9705 rtx insn;
9706 int last_label_luid;
9707 /* reg_death and reg_death_luid are solely used to remove stale REG_DEAD
9708 notes. */
9709 int reg_death_luid[FIRST_PSEUDO_REGISTER];
9710 rtx reg_death[FIRST_PSEUDO_REGISTER];
9712 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
9714 reg_set_luid[i] = 0;
9715 reg_death_luid[i] = 0;
9717 last_label_luid = 0;
9718 move2add_luid = 1;
9719 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9721 rtx pat, note;
9723 if (GET_CODE (insn) == CODE_LABEL)
9724 last_label_luid = move2add_luid;
9725 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9726 continue;
9727 pat = PATTERN (insn);
9728 /* For simplicity, we only perform this optimization on
9729 straightforward SETs. */
9730 if (GET_CODE (pat) == SET
9731 && GET_CODE (SET_DEST (pat)) == REG)
9733 rtx reg = SET_DEST (pat);
9734 int regno = REGNO (reg);
9735 rtx src = SET_SRC (pat);
9737 /* Check if we have valid information on the contents of this
9738 register in the mode of REG. */
9739 /* ??? We don't know how zero / sign extension is handled, hence
9740 we can't go from a narrower to a wider mode. */
9741 if (reg_set_luid[regno] > last_label_luid
9742 && (GET_MODE_SIZE (GET_MODE (reg))
9743 <= GET_MODE_SIZE (reg_mode[regno]))
9744 && GET_CODE (reg_offset[regno]) == CONST_INT)
9746 /* Try to transform (set (REGX) (CONST_INT A))
9747 ...
9748 (set (REGX) (CONST_INT B))
9749 to
9750 (set (REGX) (CONST_INT A))
9751 ...
9752 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9754 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9756 int success = 0;
9757 rtx new_src = GEN_INT (INTVAL (src)
9758 - INTVAL (reg_offset[regno]));
9759 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9760 use (set (reg) (reg)) instead.
9761 We don't delete this insn, nor do we convert it into a
9762 note, to avoid losing register notes or the return
9763 value flag. jump2 already knows how to get rid of
9764 no-op moves. */
9765 if (new_src == const0_rtx)
9766 success = validate_change (insn, &SET_SRC (pat), reg, 0);
9767 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9768 && have_add2_insn (GET_MODE (reg)))
9769 success = validate_change (insn, &PATTERN (insn),
9770 gen_add2_insn (reg, new_src), 0);
9771 if (success && reg_death_luid[regno] > reg_set_luid[regno])
9772 remove_death (regno, reg_death[regno]);
9773 reg_set_luid[regno] = move2add_luid;
9774 reg_mode[regno] = GET_MODE (reg);
9775 reg_offset[regno] = src;
9776 continue;
9779 /* Try to transform (set (REGX) (REGY))
9780 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9781 ...
9782 (set (REGX) (REGY))
9783 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9784 to
9785 (set (REGX) (REGY))
9786 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9787 ...
9788 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9789 else if (GET_CODE (src) == REG
9790 && reg_base_reg[regno] == REGNO (src)
9791 && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
9793 rtx next = next_nonnote_insn (insn);
9794 rtx set;
9795 if (next)
9796 set = single_set (next);
9797 if (next
9798 && set
9799 && SET_DEST (set) == reg
9800 && GET_CODE (SET_SRC (set)) == PLUS
9801 && XEXP (SET_SRC (set), 0) == reg
9802 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9804 rtx src3 = XEXP (SET_SRC (set), 1);
9805 rtx new_src = GEN_INT (INTVAL (src3)
9806 - INTVAL (reg_offset[regno]));
9807 int success = 0;
9809 if (new_src == const0_rtx)
9810 /* See above why we create (set (reg) (reg)) here. */
9811 success
9812 = validate_change (next, &SET_SRC (set), reg, 0);
9813 else if ((rtx_cost (new_src, PLUS)
9814 < 2 + rtx_cost (src3, SET))
9815 && have_add2_insn (GET_MODE (reg)))
9816 success
9817 = validate_change (next, &PATTERN (next),
9818 gen_add2_insn (reg, new_src), 0);
9819 if (success)
9821 if (reg_death_luid[regno] > reg_set_luid[regno])
9822 remove_death (regno, reg_death[regno]);
9823 /* INSN might be the first insn in a basic block
9824 if the preceding insn is a conditional jump
9825 or a possibly-throwing call. */
9826 PUT_CODE (insn, NOTE);
9827 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9828 NOTE_SOURCE_FILE (insn) = 0;
9830 insn = next;
9831 reg_set_luid[regno] = move2add_luid;
9832 reg_mode[regno] = GET_MODE (reg);
9833 reg_offset[regno] = src3;
9834 continue;
9840 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9842 if (REG_NOTE_KIND (note) == REG_INC
9843 && GET_CODE (XEXP (note, 0)) == REG)
9845 /* Indicate that this register has been recently written to,
9846 but the exact contents are not available. */
9847 int regno = REGNO (XEXP (note, 0));
9848 if (regno < FIRST_PSEUDO_REGISTER)
9850 reg_set_luid[regno] = move2add_luid;
9851 reg_offset[regno] = note;
9854 /* Remember any REG_DEAD notes so that we can remove them
9855 later if necessary. */
9856 else if (REG_NOTE_KIND (note) == REG_DEAD
9857 && GET_CODE (XEXP (note, 0)) == REG)
9859 int regno = REGNO (XEXP (note, 0));
9860 if (regno < FIRST_PSEUDO_REGISTER)
9862 reg_death[regno] = insn;
9863 reg_death_luid[regno] = move2add_luid;
9867 note_stores (PATTERN (insn), move2add_note_store);
9868 /* If this is a CALL_INSN, all call used registers are stored with
9869 unknown values. */
9870 if (GET_CODE (insn) == CALL_INSN)
9872 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
9874 if (call_used_regs[i])
9876 reg_set_luid[i] = move2add_luid;
9877 reg_offset[i] = insn; /* Invalidate contents. */
9884 /* SET is a SET or CLOBBER that sets DST.
9885 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9886 Called from reload_cse_move2add via note_stores. */
9887 static void
9888 move2add_note_store (dst, set)
9889 rtx dst, set;
9891 int regno = 0;
9892 int i;
9894 enum machine_mode mode = GET_MODE (dst);
9895 if (GET_CODE (dst) == SUBREG)
9897 regno = SUBREG_WORD (dst);
9898 dst = SUBREG_REG (dst);
9900 if (GET_CODE (dst) != REG)
9901 return;
9903 regno += REGNO (dst);
9905 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET)
9907 rtx src = SET_SRC (set);
9909 reg_mode[regno] = mode;
9910 switch (GET_CODE (src))
9912 case PLUS:
9914 rtx src0 = XEXP (src, 0);
9915 if (GET_CODE (src0) == REG)
9917 if (REGNO (src0) != regno
9918 || reg_offset[regno] != const0_rtx)
9920 reg_base_reg[regno] = REGNO (src0);
9921 reg_set_luid[regno] = move2add_luid;
9923 reg_offset[regno] = XEXP (src, 1);
9924 break;
9926 reg_set_luid[regno] = move2add_luid;
9927 reg_offset[regno] = set; /* Invalidate contents. */
9928 break;
9931 case REG:
9932 reg_base_reg[regno] = REGNO (SET_SRC (set));
9933 reg_offset[regno] = const0_rtx;
9934 reg_set_luid[regno] = move2add_luid;
9935 break;
9937 default:
9938 reg_base_reg[regno] = -1;
9939 reg_offset[regno] = SET_SRC (set);
9940 reg_set_luid[regno] = move2add_luid;
9941 break;
9944 else
9946 for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
9948 /* Indicate that this register has been recently written to,
9949 but the exact contents are not available. */
9950 reg_set_luid[i] = move2add_luid;
9951 reg_offset[i] = dst;