Do not do src->dest copy if register would not be allocated a normal register
[official-gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38 #include "toplev.h"
40 /* This file contains the reload pass of the compiler, which is
41 run after register allocation has been done. It checks that
42 each insn is valid (operands required to be in registers really
43 are in registers of the proper class) and fixes up invalid ones
44 by copying values temporarily into registers for the insns
45 that need them.
47 The results of register allocation are described by the vector
48 reg_renumber; the insns still contain pseudo regs, but reg_renumber
49 can be used to find which hard reg, if any, a pseudo reg is in.
51 The technique we always use is to free up a few hard regs that are
52 called ``reload regs'', and for each place where a pseudo reg
53 must be in a hard reg, copy it temporarily into one of the reload regs.
55 All the pseudos that were formerly allocated to the hard regs that
56 are now in use as reload regs must be ``spilled''. This means
57 that they go to other hard regs, or to stack slots if no other
58 available hard regs can be found. Spilling can invalidate more
59 insns, requiring additional reloads, so we must keep checking
60 until the process stabilizes.
62 For machines with different classes of registers, we must keep track
63 of the register class needed for each reload, and make sure that
64 we allocate enough reload registers of each class.
66 The file reload.c contains the code that checks one insn for
67 validity and reports the reloads that it needs. This file
68 is in charge of scanning the entire rtl code, accumulating the
69 reload needs, spilling, assigning reload registers to use for
70 fixing up each insn, and generating the new insns to copy values
71 into the reload registers. */
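/* Editor's note: the sketch below is illustrative only and is not part of
   reload1.c.  It models, with hypothetical names, the fixed-point structure
   described above: spilling pseudos to free reload registers can invalidate
   further insns, so the pass repeats a full scan until one pass changes
   nothing.  The block is inside "#if 0" so it is never compiled.  */
#if 0
static int
toy_scan_and_spill (void)
{
  /* Stand-in for one pass over all insns: returns nonzero if it had to
     spill anything, i.e. if another pass is required.  */
  return 0;
}

static void
toy_reload_driver (void)
{
  int something_changed = 1;

  while (something_changed)
    something_changed = toy_scan_and_spill ();
}
#endif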
74 #ifndef REGISTER_MOVE_COST
75 #define REGISTER_MOVE_COST(x, y) 2
76 #endif
78 /* During reload_as_needed, element N contains a REG rtx for the hard reg
79 into which reg N has been reloaded (perhaps for a previous insn). */
80 static rtx *reg_last_reload_reg;
82 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
83 for an output reload that stores into reg N. */
84 static char *reg_has_output_reload;
86 /* Indicates which hard regs are reload-registers for an output reload
87 in the current insn. */
88 static HARD_REG_SET reg_is_output_reload;
90 /* Element N is the constant value to which pseudo reg N is equivalent,
91 or zero if pseudo reg N is not equivalent to a constant.
92 find_reloads looks at this in order to replace pseudo reg N
93 with the constant it stands for. */
94 rtx *reg_equiv_constant;
96 /* Element N is a memory location to which pseudo reg N is equivalent,
97 prior to any register elimination (such as frame pointer to stack
98 pointer). Depending on whether or not it is a valid address, this value
99 is transferred to either reg_equiv_address or reg_equiv_mem. */
100 rtx *reg_equiv_memory_loc;
102 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
103 This is used when the address is not valid as a memory address
104 (because its displacement is too big for the machine.) */
105 rtx *reg_equiv_address;
107 /* Element N is the memory slot to which pseudo reg N is equivalent,
108 or zero if pseudo reg N is not equivalent to a memory slot. */
109 rtx *reg_equiv_mem;
111 /* Widest width in which each pseudo reg is referred to (via subreg). */
112 static int *reg_max_ref_width;
114 /* Element N is the insn that initialized reg N from its equivalent
115 constant or memory slot. */
116 static rtx *reg_equiv_init;
118 /* During reload_as_needed, element N contains the last pseudo regno reloaded
119 into hard register N. If that pseudo reg occupied more than one register,
120 reg_reloaded_contents points to that pseudo for each spill register in
121 use; all of these must remain set for an inheritance to occur. */
122 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
124 /* During reload_as_needed, element N contains the insn for which
125 hard register N was last used. Its contents are significant only
126 when reg_reloaded_valid is set for this register. */
127 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
129 /* Indicates whether reg_reloaded_insn / reg_reloaded_contents is valid.  */
130 static HARD_REG_SET reg_reloaded_valid;
131 /* Indicates whether the register was dead at the end of the reload.
132 This is only valid if reg_reloaded_contents is set and valid. */
133 static HARD_REG_SET reg_reloaded_dead;
135 /* Number of spill-regs so far; number of valid elements of spill_regs. */
136 static int n_spills;
138 /* In parallel with spill_regs, contains REG rtx's for those regs.
139 Holds the last rtx used for any given reg, or 0 if it has never
140 been used for spilling yet. This rtx is reused, provided it has
141 the proper mode. */
142 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
144 /* In parallel with spill_regs, contains nonzero for a spill reg
145 that was stored after the last time it was used.
146 The precise value is the insn generated to do the store. */
147 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
149 /* This table is the inverse mapping of spill_regs:
150 indexed by hard reg number,
151 it contains the position of that reg in spill_regs,
152 or -1 for something that is not in spill_regs. */
153 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
155 /* This reg set indicates registers that may not be used for retrying global
156 allocation. The registers that may not be used include all spill registers
157 and the frame pointer (if we are using one). */
158 HARD_REG_SET forbidden_regs;
160 /* This reg set indicates registers that are not good for spill registers.
161 They will not be used to complete groups of spill registers. This includes
162 all fixed registers, registers that may be eliminated, and, if
163 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
165 (spill_reg_order prevents these registers from being used to start a
166 group.) */
167 static HARD_REG_SET bad_spill_regs;
169 /* Describes order of use of registers for reloading
170 of spilled pseudo-registers. `spills' is the number of
171 elements that are actually valid; new ones are added at the end. */
172 static short spill_regs[FIRST_PSEUDO_REGISTER];
174 /* This reg set indicates those registers that have been used as spill
175 registers. This information is used in reorg.c, to help figure out
176 what registers are live at any point. It is assumed that all spill_regs
177 are dead at every CODE_LABEL. */
179 HARD_REG_SET used_spill_regs;
181 /* Index of last register assigned as a spill register. We allocate in
182 a round-robin fashion. */
184 static int last_spill_reg;
186 /* Describes order of preference for putting regs into spill_regs.
187 Contains the numbers of all the hard regs, in order most preferred first.
188 This order is different for each function.
189 It is set up by order_regs_for_reload.
190 Empty elements at the end contain -1. */
191 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
193 /* 1 for a hard register that appears explicitly in the rtl
194 (for example, function value registers, special registers
195 used by insns, structure value pointer registers). */
196 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
198 /* Indicates if a register was counted against the need for
199 groups. 0 means it can count against max_nongroup instead. */
200 static HARD_REG_SET counted_for_groups;
202 /* Indicates if a register was counted against the need for
203 non-groups. 0 means it can become part of a new group.
204 During choose_reload_regs, 1 here means don't use this reg
205 as part of a group, even if it seems to be otherwise ok. */
206 static HARD_REG_SET counted_for_nongroups;
208 /* Nonzero if indirect addressing is supported on the machine; this means
209 that spilling (REG n) does not require reloading it into a register in
210 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
211 value indicates the level of indirect addressing supported, e.g., two
212 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
213 a hard register. */
215 static char spill_indirect_levels;
217 /* Nonzero if indirect addressing is supported when the innermost MEM is
218 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
219 which these are valid is the same as spill_indirect_levels, above. */
221 char indirect_symref_ok;
223 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
225 char double_reg_address_ok;
227 /* Record the stack slot for each spilled hard register. */
229 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
231 /* Width allocated so far for that stack slot. */
233 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
235 /* Indexed by register class and basic block number, nonzero if there is
236 any need for a spill register of that class in that basic block.
237 The pointer is 0 if we did stupid allocation and don't know
238 the structure of basic blocks. */
240 char *basic_block_needs[N_REG_CLASSES];
242 /* First uid used by insns created by reload in this function.
243 Used in find_equiv_reg. */
244 int reload_first_uid;
246 /* Flag set by local-alloc or global-alloc if anything is live in
247 a call-clobbered reg across calls. */
249 int caller_save_needed;
251 /* The register class to use for a base register when reloading an
252 address. This is normally BASE_REG_CLASS, but it may be different
253 when using SMALL_REGISTER_CLASSES and passing parameters in
254 registers. */
255 enum reg_class reload_address_base_reg_class;
257 /* The register class to use for an index register when reloading an
258 address. This is normally INDEX_REG_CLASS, but it may be different
259 when using SMALL_REGISTER_CLASSES and passing parameters in
260 registers. */
261 enum reg_class reload_address_index_reg_class;
263 /* Set to 1 while reload_as_needed is operating.
264 Required by some machines to handle any generated moves differently. */
266 int reload_in_progress = 0;
268 /* These arrays record the insn_code of insns that may be needed to
269 perform input and output reloads of special objects. They provide a
270 place to pass a scratch register. */
272 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
273 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
275 /* This obstack is used for allocation of rtl during register elimination.
276 The allocated storage can be freed once find_reloads has processed the
277 insn. */
279 struct obstack reload_obstack;
280 char *reload_firstobj;
282 #define obstack_chunk_alloc xmalloc
283 #define obstack_chunk_free free
285 /* List of labels that must never be deleted. */
286 extern rtx forced_labels;
288 /* Allocation number table from global register allocation. */
289 extern int *reg_allocno;
291 /* This structure is used to record information about register eliminations.
292 Each array entry describes one possible way of eliminating a register
293 in favor of another. If there is more than one way of eliminating a
294 particular register, the most preferred should be specified first. */
296 static struct elim_table
298 int from; /* Register number to be eliminated. */
299 int to; /* Register number used as replacement. */
300 int initial_offset; /* Initial difference between values. */
301 int can_eliminate; /* Non-zero if this elimination can be done. */
302 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
303 insns made by reload. */
304 int offset; /* Current offset between the two regs. */
305 int max_offset; /* Maximum offset between the two regs. */
306 int previous_offset; /* Offset at end of previous insn. */
307 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
308 rtx from_rtx; /* REG rtx for the register to be eliminated.
309 We cannot simply compare the number since
310 we might then spuriously replace a hard
311 register corresponding to a pseudo
312 assigned to the reg to be eliminated. */
313 rtx to_rtx; /* REG rtx for the replacement. */
314 } reg_eliminate[] =
316 /* If a set of eliminable registers was specified, define the table from it.
317 Otherwise, default to the normal case of the frame pointer being
318 replaced by the stack pointer. */
320 #ifdef ELIMINABLE_REGS
321 ELIMINABLE_REGS;
322 #else
323 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
324 #endif
326 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
328 /* Record the number of pending eliminations that have an offset not equal
329 to their initial offset. If non-zero, we use a new copy of each
330 replacement result in any insns encountered. */
331 static int num_not_at_initial_offset;
333 /* Count the number of registers that we may be able to eliminate. */
334 static int num_eliminable;
336 /* For each label, we record the offset of each elimination. If we reach
337 a label by more than one path and an offset differs, we cannot do the
338 elimination. This information is indexed by the number of the label.
339 The first table is an array of flags that records whether we have yet
340 encountered a label and the second table is an array of arrays, one
341 entry in the latter array for each elimination. */
343 static char *offsets_known_at;
344 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
346 /* Number of labels in the current function. */
348 static int num_labels;
350 struct hard_reg_n_uses { int regno; int uses; };
352 static int possible_group_p PROTO((int, int *));
353 static void count_possible_groups PROTO((int *, enum machine_mode *,
354 int *, int));
355 static int modes_equiv_for_class_p PROTO((enum machine_mode,
356 enum machine_mode,
357 enum reg_class));
358 static void spill_failure PROTO((rtx));
359 static int new_spill_reg PROTO((int, int, int *, int *, int,
360 FILE *));
361 static void delete_dead_insn PROTO((rtx));
362 static void alter_reg PROTO((int, int));
363 static void mark_scratch_live PROTO((rtx));
364 static void set_label_offsets PROTO((rtx, rtx, int));
365 static int eliminate_regs_in_insn PROTO((rtx, int));
366 static void mark_not_eliminable PROTO((rtx, rtx));
367 static int spill_hard_reg PROTO((int, int, FILE *, int));
368 static void scan_paradoxical_subregs PROTO((rtx));
369 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
370 static void order_regs_for_reload PROTO((int));
371 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
372 static void reload_as_needed PROTO((rtx, int));
373 static void forget_old_reloads_1 PROTO((rtx, rtx));
374 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
376 enum machine_mode));
377 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
378 enum machine_mode));
379 static int reload_reg_free_p PROTO((int, int, enum reload_type));
380 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
381 static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int));
382 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
383 static int allocate_reload_reg PROTO((int, rtx, int, int));
384 static void choose_reload_regs PROTO((rtx, rtx));
385 static void merge_assigned_reloads PROTO((rtx));
386 static void emit_reload_insns PROTO((rtx));
387 static void delete_output_reload PROTO((rtx, int, rtx));
388 static void inc_for_reload PROTO((rtx, rtx, int));
389 static int constraint_accepts_reg_p PROTO((char *, rtx));
390 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
391 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
392 static void reload_cse_invalidate_mem PROTO((rtx));
393 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
394 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
395 static int reload_cse_noop_set_p PROTO((rtx, rtx));
396 static int reload_cse_simplify_set PROTO((rtx, rtx));
397 static int reload_cse_simplify_operands PROTO((rtx));
398 static void reload_cse_check_clobber PROTO((rtx, rtx));
399 static void reload_cse_record_set PROTO((rtx, rtx));
400 static void reload_cse_delete_death_notes PROTO((rtx));
401 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
403 /* Initialize the reload pass once per compilation. */
405 void
406 init_reload ()
408 register int i;
410 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
411 Set spill_indirect_levels to the number of levels such addressing is
412 permitted, zero if it is not permitted at all. */
414 register rtx tem
415 = gen_rtx_MEM (Pmode,
416 gen_rtx_PLUS (Pmode,
417 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
418 GEN_INT (4)));
419 spill_indirect_levels = 0;
421 while (memory_address_p (QImode, tem))
423 spill_indirect_levels++;
424 tem = gen_rtx_MEM (Pmode, tem);
427 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
429 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
430 indirect_symref_ok = memory_address_p (QImode, tem);
432 /* See if reg+reg is a valid (and offsettable) address. */
434 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
436 tem = gen_rtx_PLUS (Pmode,
437 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
438 gen_rtx_REG (Pmode, i));
439 /* This way, we make sure that reg+reg is an offsettable address. */
440 tem = plus_constant (tem, 4);
442 if (memory_address_p (QImode, tem))
444 double_reg_address_ok = 1;
445 break;
449 /* Initialize obstack for our rtl allocation. */
450 gcc_obstack_init (&reload_obstack);
451 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
453 /* Decide which register class should be used when reloading
454 addresses. If we are using SMALL_REGISTER_CLASSES, and any
455 parameters are passed in registers, then we do not want to use
456 those registers when reloading an address. Otherwise, if a
457 function argument needs a reload, we may wind up clobbering
458 another argument to the function which was already computed. If
459 we find a subset class which simply avoids those registers, we
460 use it instead. ??? It would be better to only use the
461 restricted class when we actually are loading function arguments,
462 but that is hard to determine. */
463 reload_address_base_reg_class = BASE_REG_CLASS;
464 reload_address_index_reg_class = INDEX_REG_CLASS;
465 if (SMALL_REGISTER_CLASSES)
467 int regno;
468 HARD_REG_SET base, index;
469 enum reg_class *p;
471 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
472 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
473 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
475 if (FUNCTION_ARG_REGNO_P (regno))
477 CLEAR_HARD_REG_BIT (base, regno);
478 CLEAR_HARD_REG_BIT (index, regno);
482 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
483 baseok);
484 for (p = reg_class_subclasses[BASE_REG_CLASS];
485 *p != LIM_REG_CLASSES;
486 p++)
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
489 continue;
490 usebase:
491 reload_address_base_reg_class = *p;
492 break;
494 baseok:;
496 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
497 indexok);
498 for (p = reg_class_subclasses[INDEX_REG_CLASS];
499 *p != LIM_REG_CLASSES;
500 p++)
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
503 continue;
504 useindex:
505 reload_address_index_reg_class = *p;
506 break;
508 indexok:;
512 /* Main entry point for the reload pass.
514 FIRST is the first insn of the function being compiled.
516 GLOBAL nonzero means we were called from global_alloc
517 and should attempt to reallocate any pseudoregs that we
518 displace from hard regs we will use for reloads.
519 If GLOBAL is zero, we do not have enough information to do that,
520 so any pseudo reg that is spilled must go to the stack.
522 DUMPFILE is the global-reg debugging dump file stream, or 0.
523 If it is nonzero, messages are written to it to describe
524 which registers are seized as reload regs, which pseudo regs
525 are spilled from them, and where the pseudo regs are reallocated to.
527 Return value is nonzero if reload failed
528 and we must not do any more for this function. */
531 reload (first, global, dumpfile)
532 rtx first;
533 int global;
534 FILE *dumpfile;
536 register int class;
537 register int i, j, k;
538 register rtx insn;
539 register struct elim_table *ep;
541 /* The two pointers used to track the true location of the memory used
542 for label offsets. */
543 char *real_known_ptr = NULL_PTR;
544 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
546 int something_changed;
547 int something_needs_reloads;
548 int something_needs_elimination;
549 int new_basic_block_needs;
550 enum reg_class caller_save_spill_class = NO_REGS;
551 int caller_save_group_size = 1;
553 /* Nonzero means we couldn't get enough spill regs. */
554 int failure = 0;
556 /* The basic block number currently being processed for INSN. */
557 int this_block;
559 /* Make sure even insns with volatile mem refs are recognizable. */
560 init_recog ();
562 /* Enable find_equiv_reg to distinguish insns made by reload. */
563 reload_first_uid = get_max_uid ();
565 for (i = 0; i < N_REG_CLASSES; i++)
566 basic_block_needs[i] = 0;
568 #ifdef SECONDARY_MEMORY_NEEDED
569 /* Initialize the secondary memory table. */
570 clear_secondary_mem ();
571 #endif
573 /* Remember which hard regs appear explicitly
574 before we merge into `regs_ever_live' the ones in which
575 pseudo regs have been allocated. */
576 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
578 /* We don't have a stack slot for any spill reg yet. */
579 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
580 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
582 /* Initialize the save area information for caller-save, in case some
583 are needed. */
584 init_save_areas ();
586 /* Compute which hard registers are now in use
587 as homes for pseudo registers.
588 This is done here rather than (eg) in global_alloc
589 because this point is reached even if not optimizing. */
590 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
591 mark_home_live (i);
593 /* A function that receives a nonlocal goto must save all call-saved
594 registers. */
595 if (current_function_has_nonlocal_label)
596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
598 if (! call_used_regs[i] && ! fixed_regs[i])
599 regs_ever_live[i] = 1;
602 for (i = 0; i < scratch_list_length; i++)
603 if (scratch_list[i])
604 mark_scratch_live (scratch_list[i]);
606 /* Make sure that the last insn in the chain
607 is not something that needs reloading. */
608 emit_note (NULL_PTR, NOTE_INSN_DELETED);
610 /* Find all the pseudo registers that didn't get hard regs
611 but do have known equivalent constants or memory slots.
612 These include parameters (known equivalent to parameter slots)
613 and cse'd or loop-moved constant memory addresses.
615 Record constant equivalents in reg_equiv_constant
616 so they will be substituted by find_reloads.
617 Record memory equivalents in reg_equiv_memory_loc so they can
618 be substituted eventually by altering the REG-rtx's. */
620 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
621 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
622 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
623 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
624 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
625 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
626 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
628 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
630 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
631 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
633 if (SMALL_REGISTER_CLASSES)
634 CLEAR_HARD_REG_SET (forbidden_regs);
636 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
637 Also find all paradoxical subregs and find largest such for each pseudo.
638 On machines with small register classes, record hard registers that
639 are used for user variables. These can never be used for spills.
640 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
641 caller-saved registers must be marked live. */
643 for (insn = first; insn; insn = NEXT_INSN (insn))
645 rtx set = single_set (insn);
647 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
648 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
649 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
650 if (! call_used_regs[i])
651 regs_ever_live[i] = 1;
653 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
655 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
656 if (note
657 #ifdef LEGITIMATE_PIC_OPERAND_P
658 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
659 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
660 #endif
663 rtx x = XEXP (note, 0);
664 i = REGNO (SET_DEST (set));
665 if (i > LAST_VIRTUAL_REGISTER)
667 if (GET_CODE (x) == MEM)
669 /* If the operand is a PLUS, the MEM may be shared,
670 so make sure we have an unshared copy here. */
671 if (GET_CODE (XEXP (x, 0)) == PLUS)
672 x = copy_rtx (x);
674 reg_equiv_memory_loc[i] = x;
676 else if (CONSTANT_P (x))
678 if (LEGITIMATE_CONSTANT_P (x))
679 reg_equiv_constant[i] = x;
680 else
681 reg_equiv_memory_loc[i]
682 = force_const_mem (GET_MODE (SET_DEST (set)), x);
684 else
685 continue;
687 /* If this register is being made equivalent to a MEM
688 and the MEM is not SET_SRC, the equivalencing insn
689 is one with the MEM as a SET_DEST and it occurs later.
690 So don't mark this insn now. */
691 if (GET_CODE (x) != MEM
692 || rtx_equal_p (SET_SRC (set), x))
693 reg_equiv_init[i] = insn;
698 /* If this insn is setting a MEM from a register equivalent to it,
699 this is the equivalencing insn. */
700 else if (set && GET_CODE (SET_DEST (set)) == MEM
701 && GET_CODE (SET_SRC (set)) == REG
702 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
703 && rtx_equal_p (SET_DEST (set),
704 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
705 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
707 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
708 scan_paradoxical_subregs (PATTERN (insn));
711 /* Does this function require a frame pointer? */
713 frame_pointer_needed = (! flag_omit_frame_pointer
714 #ifdef EXIT_IGNORE_STACK
715 /* ?? If EXIT_IGNORE_STACK is set, we will not save
716 and restore sp for alloca. So we can't eliminate
717 the frame pointer in that case. At some point,
718 we should improve this by emitting the
719 sp-adjusting insns for this case. */
720 || (current_function_calls_alloca
721 && EXIT_IGNORE_STACK)
722 #endif
723 || FRAME_POINTER_REQUIRED);
725 num_eliminable = 0;
727 /* Initialize the table of registers to eliminate. The way we do this
728 depends on how the eliminable registers were defined. */
729 #ifdef ELIMINABLE_REGS
730 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
732 ep->can_eliminate = ep->can_eliminate_previous
733 = (CAN_ELIMINATE (ep->from, ep->to)
734 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
736 #else
737 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
738 = ! frame_pointer_needed;
739 #endif
741 /* Count the number of eliminable registers and build the FROM and TO
742 REG rtx's. Note that code in gen_rtx will cause, e.g.,
743 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
744 We depend on this. */
745 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
747 num_eliminable += ep->can_eliminate;
748 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
749 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
752 num_labels = max_label_num () - get_first_label_num ();
754 /* Allocate the tables used to store offset information at labels. */
755 /* We used to use alloca here, but the size of what it would try to
756 allocate would occasionally cause it to exceed the stack limit and
757 cause a core dump. */
758 real_known_ptr = xmalloc (num_labels);
759 real_at_ptr
760 = (int (*)[NUM_ELIMINABLE_REGS])
761 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
763 offsets_known_at = real_known_ptr - get_first_label_num ();
764 offsets_at
765 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
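/* Editor's note: the sketch below is illustrative only and is not part of
   reload1.c.  It isolates the pointer-biasing idiom used just above for
   offsets_known_at and offsets_at: allocate num_labels entries, then bias
   the pointer by the first label number so the table can be indexed
   directly by label number.  All names are hypothetical and error handling
   is omitted; the block is inside "#if 0" so it is never compiled.  */
#if 0
#include <stdlib.h>

static char *toy_table_base;    /* What the allocator returned; free this.  */
static char *toy_table;         /* Biased pointer, indexed by label number.  */

static void
toy_init_label_table (int first_label, int num_labels)
{
  toy_table_base = (char *) calloc (num_labels, 1);
  /* toy_table[L] is now valid for first_label <= L < first_label + num_labels
     and refers to toy_table_base[L - first_label], mirroring the
     real_known_ptr / offsets_known_at pair above.  */
  toy_table = toy_table_base - first_label;
}
#endif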
767 /* Alter each pseudo-reg rtx to contain its hard reg number.
768 Assign stack slots to the pseudos that lack hard regs or equivalents.
769 Do not touch virtual registers. */
771 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
772 alter_reg (i, -1);
774 /* If we have some registers we think can be eliminated, scan all insns to
775 see if there is an insn that sets one of these registers to something
776 other than itself plus a constant. If so, the register cannot be
777 eliminated. Doing this scan here eliminates an extra pass through the
778 main reload loop in the most common case where register elimination
779 cannot be done. */
780 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
781 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
782 || GET_CODE (insn) == CALL_INSN)
783 note_stores (PATTERN (insn), mark_not_eliminable);
785 #ifndef REGISTER_CONSTRAINTS
786 /* If all the pseudo regs have hard regs,
787 except for those that are never referenced,
788 we know that no reloads are needed. */
789 /* But that is not true if there are register constraints, since
790 in that case some pseudos might be in the wrong kind of hard reg. */
792 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
793 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
794 break;
796 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
798 free (real_known_ptr);
799 free (real_at_ptr);
800 return;
802 #endif
804 /* Compute the order of preference for hard registers to spill.
805 Store them by decreasing preference in potential_reload_regs. */
807 order_regs_for_reload (global);
809 /* So far, no hard regs have been spilled. */
810 n_spills = 0;
811 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
812 spill_reg_order[i] = -1;
814 /* Initialize to -1, which means take the first spill register. */
815 last_spill_reg = -1;
817 /* On most machines, we can't use any register explicitly used in the
818 rtl as a spill register. But on some, we have to. Those will have
819 taken care to keep the life of hard regs as short as possible. */
821 if (! SMALL_REGISTER_CLASSES)
822 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
824 /* Spill any hard regs that we know we can't eliminate. */
825 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
826 if (! ep->can_eliminate)
827 spill_hard_reg (ep->from, global, dumpfile, 1);
829 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
830 if (frame_pointer_needed)
831 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
832 #endif
834 if (global)
835 for (i = 0; i < N_REG_CLASSES; i++)
837 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
838 bzero (basic_block_needs[i], n_basic_blocks);
841 /* From now on, we need to emit any moves without making new pseudos. */
842 reload_in_progress = 1;
844 /* This loop scans the entire function each go-round
845 and repeats until one repetition spills no additional hard regs. */
847 /* This flag is set when a pseudo reg is spilled,
848 to require another pass. Note that getting an additional reload
849 reg does not necessarily imply any pseudo reg was spilled;
850 sometimes we find a reload reg that no pseudo reg was allocated in. */
851 something_changed = 1;
852 /* This flag is set if there are any insns that require reloading. */
853 something_needs_reloads = 0;
854 /* This flag is set if there are any insns that require register
855 eliminations. */
856 something_needs_elimination = 0;
857 while (something_changed)
859 rtx after_call = 0;
861 /* For each class, number of reload regs needed in that class.
862 This is the maximum over all insns of the needs in that class
863 of the individual insn. */
864 int max_needs[N_REG_CLASSES];
865 /* For each class, size of group of consecutive regs
866 that is needed for the reloads of this class. */
867 int group_size[N_REG_CLASSES];
868 /* For each class, max number of consecutive groups needed.
869 (Each group contains group_size[CLASS] consecutive registers.) */
870 int max_groups[N_REG_CLASSES];
871 /* For each class, max number needed of regs that don't belong
872 to any of the groups. */
873 int max_nongroups[N_REG_CLASSES];
874 /* For each class, the machine mode which requires consecutive
875 groups of regs of that class.
876 If two different modes ever require groups of one class,
877 they must be the same size and equally restrictive for that class;
878 otherwise we can't handle the complexity. */
879 enum machine_mode group_mode[N_REG_CLASSES];
880 /* Record the insn where each maximum need is first found. */
881 rtx max_needs_insn[N_REG_CLASSES];
882 rtx max_groups_insn[N_REG_CLASSES];
883 rtx max_nongroups_insn[N_REG_CLASSES];
884 rtx x;
885 HOST_WIDE_INT starting_frame_size;
886 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
887 int previous_frame_pointer_needed = frame_pointer_needed;
888 #endif
889 static char *reg_class_names[] = REG_CLASS_NAMES;
891 something_changed = 0;
892 bzero ((char *) max_needs, sizeof max_needs);
893 bzero ((char *) max_groups, sizeof max_groups);
894 bzero ((char *) max_nongroups, sizeof max_nongroups);
895 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
896 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
897 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
898 bzero ((char *) group_size, sizeof group_size);
899 for (i = 0; i < N_REG_CLASSES; i++)
900 group_mode[i] = VOIDmode;
902 /* Keep track of which basic blocks need reloads. */
903 this_block = 0;
905 /* Remember whether any element of basic_block_needs
906 changes from 0 to 1 in this pass. */
907 new_basic_block_needs = 0;
909 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
910 here because the stack size may be a part of the offset computation
911 for register elimination, and there might have been new stack slots
912 created in the last iteration of this loop. */
913 assign_stack_local (BLKmode, 0, 0);
915 starting_frame_size = get_frame_size ();
917 /* Reset all offsets on eliminable registers to their initial values. */
918 #ifdef ELIMINABLE_REGS
919 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
921 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
922 ep->previous_offset = ep->offset
923 = ep->max_offset = ep->initial_offset;
925 #else
926 #ifdef INITIAL_FRAME_POINTER_OFFSET
927 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
928 #else
929 if (!FRAME_POINTER_REQUIRED)
930 abort ();
931 reg_eliminate[0].initial_offset = 0;
932 #endif
933 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
934 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
935 #endif
937 num_not_at_initial_offset = 0;
939 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
941 /* Set a known offset for each forced label to be at the initial offset
942 of each elimination. We do this because we assume that all
943 computed jumps occur from a location where each elimination is
944 at its initial offset. */
946 for (x = forced_labels; x; x = XEXP (x, 1))
947 if (XEXP (x, 0))
948 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
950 /* For each pseudo register that has an equivalent location defined,
951 try to eliminate any eliminable registers (such as the frame pointer)
952 assuming initial offsets for the replacement register, which
953 is the normal case.
955 If the resulting location is directly addressable, substitute
956 the MEM we just got directly for the old REG.
958 If it is not addressable but is a constant or the sum of a hard reg
959 and constant, it is probably not addressable because the constant is
960 out of range, in that case record the address; we will generate
961 hairy code to compute the address in a register each time it is
962 needed. Similarly if it is a hard register, but one that is not
963 valid as an address register.
965 If the location is not addressable, but does not have one of the
966 above forms, assign a stack slot. We have to do this to avoid the
967 potential of producing lots of reloads if, e.g., a location involves
968 a pseudo that didn't get a hard register and has an equivalent memory
969 location that also involves a pseudo that didn't get a hard register.
971 Perhaps at some point we will improve reload_when_needed handling
972 so this problem goes away. But that's very hairy. */
974 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
975 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
977 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
979 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
980 XEXP (x, 0)))
981 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
982 else if (CONSTANT_P (XEXP (x, 0))
983 || (GET_CODE (XEXP (x, 0)) == REG
984 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
985 || (GET_CODE (XEXP (x, 0)) == PLUS
986 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
987 && (REGNO (XEXP (XEXP (x, 0), 0))
988 < FIRST_PSEUDO_REGISTER)
989 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
990 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
991 else
993 /* Make a new stack slot. Then indicate that something
994 changed so we go back and recompute offsets for
995 eliminable registers because the allocation of memory
996 below might change some offset. reg_equiv_{mem,address}
997 will be set up for this pseudo on the next pass around
998 the loop. */
999 reg_equiv_memory_loc[i] = 0;
1000 reg_equiv_init[i] = 0;
1001 alter_reg (i, -1);
1002 something_changed = 1;
1006 /* If we allocated another pseudo to the stack, redo elimination
1007 bookkeeping. */
1008 if (something_changed)
1009 continue;
1011 /* If caller-saves needs a group, initialize the group to include
1012 the size and mode required for caller-saves. */
1014 if (caller_save_group_size > 1)
1016 group_mode[(int) caller_save_spill_class] = Pmode;
1017 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1020 /* Compute the most additional registers needed by any instruction.
1021 Collect information separately for each class of regs. */
1023 for (insn = first; insn; insn = NEXT_INSN (insn))
1025 if (global && this_block + 1 < n_basic_blocks
1026 && insn == basic_block_head[this_block+1])
1027 ++this_block;
1029 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1030 might include REG_LABEL), we need to see what effects this
1031 has on the known offsets at labels. */
1033 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1034 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1035 && REG_NOTES (insn) != 0))
1036 set_label_offsets (insn, insn, 0);
1038 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1040 /* Nonzero means don't use a reload reg that overlaps
1041 the place where a function value can be returned. */
1042 rtx avoid_return_reg = 0;
1044 rtx old_body = PATTERN (insn);
1045 int old_code = INSN_CODE (insn);
1046 rtx old_notes = REG_NOTES (insn);
1047 int did_elimination = 0;
1049 /* To compute the number of reload registers of each class
1050 needed for an insn, we must simulate what choose_reload_regs
1051 can do. We do this by splitting an insn into an "input" and
1052 an "output" part. RELOAD_OTHER reloads are used in both.
1053 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1054 which must be live over the entire input section of reloads,
1055 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1056 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1057 inputs.
1059 The registers needed for output are RELOAD_OTHER and
1060 RELOAD_FOR_OUTPUT, which are live for the entire output
1061 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1062 reloads for each operand.
1064 The total number of registers needed is the maximum of the
1065 inputs and outputs. */
1067 struct needs
1069 /* [0] is normal, [1] is nongroup. */
1070 int regs[2][N_REG_CLASSES];
1071 int groups[N_REG_CLASSES];
1074 /* Each `struct needs' corresponds to one RELOAD_... type. */
1075 struct {
1076 struct needs other;
1077 struct needs input;
1078 struct needs output;
1079 struct needs insn;
1080 struct needs other_addr;
1081 struct needs op_addr;
1082 struct needs op_addr_reload;
1083 struct needs in_addr[MAX_RECOG_OPERANDS];
1084 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1085 struct needs out_addr[MAX_RECOG_OPERANDS];
1086 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1087 } insn_needs;
1089 /* If needed, eliminate any eliminable registers. */
1090 if (num_eliminable)
1091 did_elimination = eliminate_regs_in_insn (insn, 0);
1093 /* Set avoid_return_reg if this is an insn
1094 that might use the value of a function call. */
1095 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1097 if (GET_CODE (PATTERN (insn)) == SET)
1098 after_call = SET_DEST (PATTERN (insn));
1099 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1100 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1101 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1102 else
1103 after_call = 0;
1105 else if (SMALL_REGISTER_CLASSES && after_call != 0
1106 && !(GET_CODE (PATTERN (insn)) == SET
1107 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
1108 && GET_CODE (PATTERN (insn)) != USE)
1110 if (reg_referenced_p (after_call, PATTERN (insn)))
1111 avoid_return_reg = after_call;
1112 after_call = 0;
1115 /* Analyze the instruction. */
1116 find_reloads (insn, 0, spill_indirect_levels, global,
1117 spill_reg_order);
1119 /* Remember for later shortcuts which insns had any reloads or
1120 register eliminations.
1122 One might think that it would be worthwhile to mark insns
1123 that need register replacements but not reloads, but this is
1124 not safe because find_reloads may do some manipulation of
1125 the insn (such as swapping commutative operands), which would
1126 be lost when we restore the old pattern after register
1127 replacement. So the actions of find_reloads must be redone in
1128 subsequent passes or in reload_as_needed.
1130 However, it is safe to mark insns that need reloads
1131 but not register replacement. */
1133 PUT_MODE (insn, (did_elimination ? QImode
1134 : n_reloads ? HImode
1135 : GET_MODE (insn) == DImode ? DImode
1136 : VOIDmode));
1138 /* Discard any register replacements done. */
1139 if (did_elimination)
1141 obstack_free (&reload_obstack, reload_firstobj);
1142 PATTERN (insn) = old_body;
1143 INSN_CODE (insn) = old_code;
1144 REG_NOTES (insn) = old_notes;
1145 something_needs_elimination = 1;
1148 /* If this insn has no reloads, we need not do anything except
1149 in the case of a CALL_INSN when we have caller-saves and
1150 caller-save needs reloads. */
1152 if (n_reloads == 0
1153 && ! (GET_CODE (insn) == CALL_INSN
1154 && caller_save_spill_class != NO_REGS))
1155 continue;
1157 something_needs_reloads = 1;
1158 bzero ((char *) &insn_needs, sizeof insn_needs);
1160 /* Count each reload once in every class
1161 containing the reload's own class. */
1163 for (i = 0; i < n_reloads; i++)
1165 register enum reg_class *p;
1166 enum reg_class class = reload_reg_class[i];
1167 int size;
1168 enum machine_mode mode;
1169 struct needs *this_needs;
1171 /* Don't count the dummy reloads, for which one of the
1172 regs mentioned in the insn can be used for reloading.
1173 Don't count optional reloads.
1174 Don't count reloads that got combined with others. */
1175 if (reload_reg_rtx[i] != 0
1176 || reload_optional[i] != 0
1177 || (reload_out[i] == 0 && reload_in[i] == 0
1178 && ! reload_secondary_p[i]))
1179 continue;
1181 /* Show that a reload register of this class is needed
1182 in this basic block. We do not use insn_needs and
1183 insn_groups because they are overly conservative for
1184 this purpose. */
1185 if (global && ! basic_block_needs[(int) class][this_block])
1187 basic_block_needs[(int) class][this_block] = 1;
1188 new_basic_block_needs = 1;
1191 mode = reload_inmode[i];
1192 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1193 mode = reload_outmode[i];
1194 size = CLASS_MAX_NREGS (class, mode);
1196 /* Decide which time-of-use to count this reload for. */
1197 switch (reload_when_needed[i])
1199 case RELOAD_OTHER:
1200 this_needs = &insn_needs.other;
1201 break;
1202 case RELOAD_FOR_INPUT:
1203 this_needs = &insn_needs.input;
1204 break;
1205 case RELOAD_FOR_OUTPUT:
1206 this_needs = &insn_needs.output;
1207 break;
1208 case RELOAD_FOR_INSN:
1209 this_needs = &insn_needs.insn;
1210 break;
1211 case RELOAD_FOR_OTHER_ADDRESS:
1212 this_needs = &insn_needs.other_addr;
1213 break;
1214 case RELOAD_FOR_INPUT_ADDRESS:
1215 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1216 break;
1217 case RELOAD_FOR_INPADDR_ADDRESS:
1218 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1219 break;
1220 case RELOAD_FOR_OUTPUT_ADDRESS:
1221 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1222 break;
1223 case RELOAD_FOR_OUTADDR_ADDRESS:
1224 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1225 break;
1226 case RELOAD_FOR_OPERAND_ADDRESS:
1227 this_needs = &insn_needs.op_addr;
1228 break;
1229 case RELOAD_FOR_OPADDR_ADDR:
1230 this_needs = &insn_needs.op_addr_reload;
1231 break;
1234 if (size > 1)
1236 enum machine_mode other_mode, allocate_mode;
1238 /* Count number of groups needed separately from
1239 number of individual regs needed. */
1240 this_needs->groups[(int) class]++;
1241 p = reg_class_superclasses[(int) class];
1242 while (*p != LIM_REG_CLASSES)
1243 this_needs->groups[(int) *p++]++;
1245 /* Record size and mode of a group of this class. */
1246 /* If more than one size group is needed,
1247 make all groups the largest needed size. */
1248 if (group_size[(int) class] < size)
1250 other_mode = group_mode[(int) class];
1251 allocate_mode = mode;
1253 group_size[(int) class] = size;
1254 group_mode[(int) class] = mode;
1256 else
1258 other_mode = mode;
1259 allocate_mode = group_mode[(int) class];
1262 /* Crash if two dissimilar machine modes both need
1263 groups of consecutive regs of the same class. */
1265 if (other_mode != VOIDmode && other_mode != allocate_mode
1266 && ! modes_equiv_for_class_p (allocate_mode,
1267 other_mode, class))
1268 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1269 insn);
1271 else if (size == 1)
1273 this_needs->regs[reload_nongroup[i]][(int) class] += 1;
1274 p = reg_class_superclasses[(int) class];
1275 while (*p != LIM_REG_CLASSES)
1276 this_needs->regs[reload_nongroup[i]][(int) *p++] += 1;
1278 else
1279 abort ();
1282 /* All reloads have been counted for this insn;
1283 now merge the various times of use.
1284 This sets insn_needs, etc., to the maximum total number
1285 of registers needed at any point in this insn. */
1287 for (i = 0; i < N_REG_CLASSES; i++)
1289 int in_max, out_max;
1291 /* Compute normal and nongroup needs. */
1292 for (j = 0; j <= 1; j++)
1294 for (in_max = 0, out_max = 0, k = 0;
1295 k < reload_n_operands; k++)
1297 in_max
1298 = MAX (in_max,
1299 (insn_needs.in_addr[k].regs[j][i]
1300 + insn_needs.in_addr_addr[k].regs[j][i]));
1301 out_max
1302 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1303 out_max
1304 = MAX (out_max,
1305 insn_needs.out_addr_addr[k].regs[j][i]);
1308 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1309 and operand addresses but not things used to reload
1310 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1311 don't conflict with things needed to reload inputs or
1312 outputs. */
1314 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1315 insn_needs.op_addr_reload.regs[j][i]),
1316 in_max);
1318 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1320 insn_needs.input.regs[j][i]
1321 = MAX (insn_needs.input.regs[j][i]
1322 + insn_needs.op_addr.regs[j][i]
1323 + insn_needs.insn.regs[j][i],
1324 in_max + insn_needs.input.regs[j][i]);
1326 insn_needs.output.regs[j][i] += out_max;
1327 insn_needs.other.regs[j][i]
1328 += MAX (MAX (insn_needs.input.regs[j][i],
1329 insn_needs.output.regs[j][i]),
1330 insn_needs.other_addr.regs[j][i]);
1334 /* Now compute group needs. */
1335 for (in_max = 0, out_max = 0, j = 0;
1336 j < reload_n_operands; j++)
1338 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1339 in_max = MAX (in_max,
1340 insn_needs.in_addr_addr[j].groups[i]);
1341 out_max
1342 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1343 out_max
1344 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1347 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1348 insn_needs.op_addr_reload.groups[i]),
1349 in_max);
1350 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1352 insn_needs.input.groups[i]
1353 = MAX (insn_needs.input.groups[i]
1354 + insn_needs.op_addr.groups[i]
1355 + insn_needs.insn.groups[i],
1356 in_max + insn_needs.input.groups[i]);
1358 insn_needs.output.groups[i] += out_max;
1359 insn_needs.other.groups[i]
1360 += MAX (MAX (insn_needs.input.groups[i],
1361 insn_needs.output.groups[i]),
1362 insn_needs.other_addr.groups[i]);
1365 /* If this is a CALL_INSN and caller-saves will need
1366 a spill register, act as if the spill register is
1367 needed for this insn. However, the spill register
1368 can be used by any reload of this insn, so we only
1369 need do something if no need for that class has
1370 been recorded.
1372 The assumption that every CALL_INSN will trigger a
1373 caller-save is highly conservative; however, the number
1374 of cases where caller-saves will need a spill register but
1375 a block containing a CALL_INSN won't need a spill register
1376 of that class should be quite rare.
1378 If a group is needed, the size and mode of the group will
1379 have been set up at the beginning of this loop. */
1381 if (GET_CODE (insn) == CALL_INSN
1382 && caller_save_spill_class != NO_REGS)
1384 /* See if this register would conflict with any reload that
1385 needs a group or any reload that needs a nongroup. */
1386 int nongroup_need = 0;
1387 int *caller_save_needs;
1389 for (j = 0; j < n_reloads; j++)
1390 if (reg_classes_intersect_p (caller_save_spill_class,
1391 reload_reg_class[j])
1392 && ((CLASS_MAX_NREGS
1393 (reload_reg_class[j],
1394 (GET_MODE_SIZE (reload_outmode[j])
1395 > GET_MODE_SIZE (reload_inmode[j]))
1396 ? reload_outmode[j] : reload_inmode[j])
1397 > 1)
1398 || reload_nongroup[j]))
1400 nongroup_need = 1;
1401 break;
1404 caller_save_needs
1405 = (caller_save_group_size > 1
1406 ? insn_needs.other.groups
1407 : insn_needs.other.regs[nongroup_need]);
1409 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1411 register enum reg_class *p
1412 = reg_class_superclasses[(int) caller_save_spill_class];
1414 caller_save_needs[(int) caller_save_spill_class]++;
1416 while (*p != LIM_REG_CLASSES)
1417 caller_save_needs[(int) *p++] += 1;
1420 /* Show that this basic block will need a register of
1421 this class. */
1423 if (global
1424 && ! (basic_block_needs[(int) caller_save_spill_class]
1425 [this_block]))
1427 basic_block_needs[(int) caller_save_spill_class]
1428 [this_block] = 1;
1429 new_basic_block_needs = 1;
1433 /* If this insn stores the value of a function call,
1434 and that value is in a register that has been spilled,
1435 and if the insn needs a reload in a class
1436 that might use that register as the reload register,
1437 then add an extra need in that class.
1438 This makes sure we have a register available that does
1439 not overlap the return value. */
1441 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1443 int regno = REGNO (avoid_return_reg);
1444 int nregs
1445 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1446 int r;
1447 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1449 /* First compute the "basic needs", which counts a
1450 need only in the smallest class in which it
1451 is required. */
1453 bcopy ((char *) insn_needs.other.regs[0],
1454 (char *) basic_needs, sizeof basic_needs);
1455 bcopy ((char *) insn_needs.other.groups,
1456 (char *) basic_groups, sizeof basic_groups);
1458 for (i = 0; i < N_REG_CLASSES; i++)
1460 enum reg_class *p;
1462 if (basic_needs[i] >= 0)
1463 for (p = reg_class_superclasses[i];
1464 *p != LIM_REG_CLASSES; p++)
1465 basic_needs[(int) *p] -= basic_needs[i];
1467 if (basic_groups[i] >= 0)
1468 for (p = reg_class_superclasses[i];
1469 *p != LIM_REG_CLASSES; p++)
1470 basic_groups[(int) *p] -= basic_groups[i];
1473 /* Now count extra regs if there might be a conflict with
1474 the return value register. */
1476 for (r = regno; r < regno + nregs; r++)
1477 if (spill_reg_order[r] >= 0)
1478 for (i = 0; i < N_REG_CLASSES; i++)
1479 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1481 if (basic_needs[i] > 0)
1483 enum reg_class *p;
1485 insn_needs.other.regs[0][i]++;
1486 p = reg_class_superclasses[i];
1487 while (*p != LIM_REG_CLASSES)
1488 insn_needs.other.regs[0][(int) *p++]++;
1490 if (basic_groups[i] > 0)
1492 enum reg_class *p;
1494 insn_needs.other.groups[i]++;
1495 p = reg_class_superclasses[i];
1496 while (*p != LIM_REG_CLASSES)
1497 insn_needs.other.groups[(int) *p++]++;
1502 /* For each class, collect maximum need of any insn. */
1504 for (i = 0; i < N_REG_CLASSES; i++)
1506 if (max_needs[i] < insn_needs.other.regs[0][i])
1508 max_needs[i] = insn_needs.other.regs[0][i];
1509 max_needs_insn[i] = insn;
1511 if (max_groups[i] < insn_needs.other.groups[i])
1513 max_groups[i] = insn_needs.other.groups[i];
1514 max_groups_insn[i] = insn;
1516 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1518 max_nongroups[i] = insn_needs.other.regs[1][i];
1519 max_nongroups_insn[i] = insn;
1523 /* Note that there is a continue statement above. */
1526 /* If we allocated any new memory locations, make another pass
1527 since it might have changed elimination offsets. */
1528 if (starting_frame_size != get_frame_size ())
1529 something_changed = 1;
1531 if (dumpfile)
1532 for (i = 0; i < N_REG_CLASSES; i++)
1534 if (max_needs[i] > 0)
1535 fprintf (dumpfile,
1536 ";; Need %d reg%s of class %s (for insn %d).\n",
1537 max_needs[i], max_needs[i] == 1 ? "" : "s",
1538 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1539 if (max_nongroups[i] > 0)
1540 fprintf (dumpfile,
1541 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1542 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1543 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1544 if (max_groups[i] > 0)
1545 fprintf (dumpfile,
1546 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1547 max_groups[i], max_groups[i] == 1 ? "" : "s",
1548 mode_name[(int) group_mode[i]],
1549 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1552 /* If we have caller-saves, set up the save areas and see if caller-save
1553 will need a spill register. */
1555 if (caller_save_needed)
1557 /* Set the offsets for setup_save_areas. */
1558 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1559 ep++)
1560 ep->previous_offset = ep->max_offset;
1562 if ( ! setup_save_areas (&something_changed)
1563 && caller_save_spill_class == NO_REGS)
1565 /* The class we will need depends on whether the machine
1566 supports the sum of two registers for an address; see
 1567 find_reloads_address for details. */
1569 caller_save_spill_class
1570 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1571 caller_save_group_size
1572 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1573 something_changed = 1;
1577 /* See if anything that happened changes which eliminations are valid.
1578 For example, on the Sparc, whether or not the frame pointer can
1579 be eliminated can depend on what registers have been used. We need
1580 not check some conditions again (such as flag_omit_frame_pointer)
1581 since they can't have changed. */
1583 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1584 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1585 #ifdef ELIMINABLE_REGS
1586 || ! CAN_ELIMINATE (ep->from, ep->to)
1587 #endif
1589 ep->can_eliminate = 0;
1591 /* Look for the case where we have discovered that we can't replace
1592 register A with register B and that means that we will now be
1593 trying to replace register A with register C. This means we can
1594 no longer replace register C with register B and we need to disable
1595 such an elimination, if it exists. This occurs often with A == ap,
1596 B == sp, and C == fp. */
1598 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1600 struct elim_table *op;
1601 register int new_to = -1;
1603 if (! ep->can_eliminate && ep->can_eliminate_previous)
1605 /* Find the current elimination for ep->from, if there is a
1606 new one. */
1607 for (op = reg_eliminate;
1608 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1609 if (op->from == ep->from && op->can_eliminate)
1611 new_to = op->to;
1612 break;
1615 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1616 disable it. */
1617 for (op = reg_eliminate;
1618 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1619 if (op->from == new_to && op->to == ep->to)
1620 op->can_eliminate = 0;
1624 /* See if any registers that we thought we could eliminate the previous
1625 time are no longer eliminable. If so, something has changed and we
1626 must spill the register. Also, recompute the number of eliminable
1627 registers and see if the frame pointer is needed; it is if there is
1628 no elimination of the frame pointer that we can perform. */
1630 frame_pointer_needed = 1;
1631 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1633 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1634 && ep->to != HARD_FRAME_POINTER_REGNUM)
1635 frame_pointer_needed = 0;
1637 if (! ep->can_eliminate && ep->can_eliminate_previous)
1639 ep->can_eliminate_previous = 0;
1640 spill_hard_reg (ep->from, global, dumpfile, 1);
1641 something_changed = 1;
1642 num_eliminable--;
1646 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1647 /* If we didn't need a frame pointer last time, but we do now, spill
1648 the hard frame pointer. */
1649 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1651 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1652 something_changed = 1;
1654 #endif
1656 /* If all needs are met, we win. */
1658 for (i = 0; i < N_REG_CLASSES; i++)
1659 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1660 break;
1661 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1662 break;
1664 /* Not all needs are met; must spill some hard regs. */
1666 /* Put all registers spilled so far back in potential_reload_regs, but
1667 put them at the front, since we've already spilled most of the
1668 pseudos in them (we might have left some pseudos unspilled if they
1669 were in a block that didn't need any spill registers of a conflicting
 1670 class). We used to try to mark off the need for those registers,
1671 but doing so properly is very complex and reallocating them is the
1672 simpler approach. First, "pack" potential_reload_regs by pushing
1673 any nonnegative entries towards the end. That will leave room
1674 for the registers we already spilled.
1676 Also, undo the marking of the spill registers from the last time
 1677 around in FORBIDDEN_REGS since we will probably be allocating
1678 them again below.
1680 ??? It is theoretically possible that we might end up not using one
1681 of our previously-spilled registers in this allocation, even though
1682 they are at the head of the list. It's not clear what to do about
1683 this, but it was no better before, when we marked off the needs met
1684 by the previously-spilled registers. With the current code, globals
1685 can be allocated into these registers, but locals cannot. */
1687 if (n_spills)
1689 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1690 if (potential_reload_regs[i] != -1)
1691 potential_reload_regs[j--] = potential_reload_regs[i];
1693 for (i = 0; i < n_spills; i++)
1695 potential_reload_regs[i] = spill_regs[i];
1696 spill_reg_order[spill_regs[i]] = -1;
1697 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1700 n_spills = 0;
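 /* Illustrative sketch, not part of the original source: the repacking
    above, reduced to plain arrays.  `prefs' is a hypothetical preference
    list (-1 marks an empty slot) and `old_spills' holds the registers
    spilled on the previous pass:

	for (i = j = n - 1; i >= 0; i--)
	  if (prefs[i] != -1)
	    prefs[j--] = prefs[i];
	for (i = 0; i < n_old; i++)
	  prefs[i] = old_spills[i];

    e.g. { -1, 2, -1, 7 } with previous spill regs { 3, 5 } becomes
    { 3, 5, 2, 7 }, so the registers spilled last time are the most
    preferred candidates this time.  */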
 1703 /* Now find more reload regs to satisfy the remaining need.
1704 Do it by ascending class number, since otherwise a reg
1705 might be spilled for a big class and might fail to count
1706 for a smaller class even though it belongs to that class.
 1708 Count spilled regs in `n_spills', and add entries to
1709 `spill_regs' and `spill_reg_order'.
1711 ??? Note there is a problem here.
1712 When there is a need for a group in a high-numbered class,
1713 and also need for non-group regs that come from a lower class,
1714 the non-group regs are chosen first. If there aren't many regs,
1715 they might leave no room for a group.
1717 This was happening on the 386. To fix it, we added the code
1718 that calls possible_group_p, so that the lower class won't
1719 break up the last possible group.
1721 Really fixing the problem would require changes above
1722 in counting the regs already spilled, and in choose_reload_regs.
1723 It might be hard to avoid introducing bugs there. */
1725 CLEAR_HARD_REG_SET (counted_for_groups);
1726 CLEAR_HARD_REG_SET (counted_for_nongroups);
1728 for (class = 0; class < N_REG_CLASSES; class++)
1730 /* First get the groups of registers.
1731 If we got single registers first, we might fragment
1732 possible groups. */
1733 while (max_groups[class] > 0)
1735 /* If any single spilled regs happen to form groups,
1736 count them now. Maybe we don't really need
1737 to spill another group. */
1738 count_possible_groups (group_size, group_mode, max_groups,
1739 class);
1741 if (max_groups[class] <= 0)
1742 break;
1744 /* Groups of size 2 (the only groups used on most machines)
1745 are treated specially. */
1746 if (group_size[class] == 2)
1748 /* First, look for a register that will complete a group. */
1749 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1751 int other;
1753 j = potential_reload_regs[i];
1754 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1756 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1757 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1758 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1759 && HARD_REGNO_MODE_OK (other, group_mode[class])
1760 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1761 other)
1762 /* We don't want one part of another group.
1763 We could get "two groups" that overlap! */
1764 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1766 (j < FIRST_PSEUDO_REGISTER - 1
1767 && (other = j + 1, spill_reg_order[other] >= 0)
1768 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1769 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1770 && HARD_REGNO_MODE_OK (j, group_mode[class])
1771 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1772 other)
1773 && ! TEST_HARD_REG_BIT (counted_for_groups,
1774 other))))
1776 register enum reg_class *p;
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups[class]--;
1781 p = reg_class_superclasses[class];
1782 while (*p != LIM_REG_CLASSES)
1784 if (group_size [(int) *p] <= group_size [class])
1785 max_groups[(int) *p]--;
1786 p++;
1789 /* Indicate both these regs are part of a group. */
1790 SET_HARD_REG_BIT (counted_for_groups, j);
1791 SET_HARD_REG_BIT (counted_for_groups, other);
1792 break;
1795 /* We can't complete a group, so start one. */
1796 /* Look for a pair neither of which is explicitly used. */
1797 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1798 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1800 int k;
1801 j = potential_reload_regs[i];
1802 /* Verify that J+1 is a potential reload reg. */
1803 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1804 if (potential_reload_regs[k] == j + 1)
1805 break;
1806 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1807 && k < FIRST_PSEUDO_REGISTER
1808 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1809 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1810 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1811 && HARD_REGNO_MODE_OK (j, group_mode[class])
1812 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1813 j + 1)
1814 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1815 /* Reject J at this stage
1816 if J+1 was explicitly used. */
1817 && ! regs_explicitly_used[j + 1])
1818 break;
1820 /* Now try any group at all
1821 whose registers are not in bad_spill_regs. */
1822 if (i == FIRST_PSEUDO_REGISTER)
1823 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1825 int k;
1826 j = potential_reload_regs[i];
1827 /* Verify that J+1 is a potential reload reg. */
1828 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1829 if (potential_reload_regs[k] == j + 1)
1830 break;
1831 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1832 && k < FIRST_PSEUDO_REGISTER
1833 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1834 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1835 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1836 && HARD_REGNO_MODE_OK (j, group_mode[class])
1837 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1838 j + 1)
1839 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1840 break;
1843 /* I should be the index in potential_reload_regs
1844 of the new reload reg we have found. */
1846 if (i >= FIRST_PSEUDO_REGISTER)
1848 /* There are no groups left to spill. */
1849 spill_failure (max_groups_insn[class]);
1850 failure = 1;
1851 goto failed;
1853 else
1854 something_changed
1855 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1856 global, dumpfile);
1858 else
1860 /* For groups of more than 2 registers,
1861 look for a sufficient sequence of unspilled registers,
1862 and spill them all at once. */
1863 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1865 int k;
1867 j = potential_reload_regs[i];
1868 if (j >= 0
1869 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1870 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1872 /* Check each reg in the sequence. */
1873 for (k = 0; k < group_size[class]; k++)
1874 if (! (spill_reg_order[j + k] < 0
1875 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1876 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1877 break;
1878 /* We got a full sequence, so spill them all. */
1879 if (k == group_size[class])
1881 register enum reg_class *p;
1882 for (k = 0; k < group_size[class]; k++)
1884 int idx;
1885 SET_HARD_REG_BIT (counted_for_groups, j + k);
1886 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1887 if (potential_reload_regs[idx] == j + k)
1888 break;
1889 something_changed
1890 |= new_spill_reg (idx, class,
1891 max_needs, NULL_PTR,
1892 global, dumpfile);
1895 /* We have found one that will complete a group,
1896 so count off one group as provided. */
1897 max_groups[class]--;
1898 p = reg_class_superclasses[class];
1899 while (*p != LIM_REG_CLASSES)
1901 if (group_size [(int) *p]
1902 <= group_size [class])
1903 max_groups[(int) *p]--;
1904 p++;
1906 break;
1910 /* We couldn't find any registers for this reload.
1911 Avoid going into an infinite loop. */
1912 if (i >= FIRST_PSEUDO_REGISTER)
1914 /* There are no groups left. */
1915 spill_failure (max_groups_insn[class]);
1916 failure = 1;
1917 goto failed;
1922 /* Now similarly satisfy all need for single registers. */
1924 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1926 /* If we spilled enough regs, but they weren't counted
1927 against the non-group need, see if we can count them now.
1928 If so, we can avoid some actual spilling. */
1929 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1930 for (i = 0; i < n_spills; i++)
1931 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1932 spill_regs[i])
1933 && !TEST_HARD_REG_BIT (counted_for_groups,
1934 spill_regs[i])
1935 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1936 spill_regs[i])
1937 && max_nongroups[class] > 0)
1939 register enum reg_class *p;
1941 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1942 max_nongroups[class]--;
1943 p = reg_class_superclasses[class];
1944 while (*p != LIM_REG_CLASSES)
1945 max_nongroups[(int) *p++]--;
1947 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1948 break;
1950 /* Consider the potential reload regs that aren't
1951 yet in use as reload regs, in order of preference.
1952 Find the most preferred one that's in this class. */
1954 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1955 if (potential_reload_regs[i] >= 0
1956 && TEST_HARD_REG_BIT (reg_class_contents[class],
1957 potential_reload_regs[i])
1958 /* If this reg will not be available for groups,
1959 pick one that does not foreclose possible groups.
1960 This is a kludge, and not very general,
1961 but it should be sufficient to make the 386 work,
1962 and the problem should not occur on machines with
1963 more registers. */
1964 && (max_nongroups[class] == 0
1965 || possible_group_p (potential_reload_regs[i], max_groups)))
1966 break;
1968 /* If we couldn't get a register, try to get one even if we
1969 might foreclose possible groups. This may cause problems
1970 later, but that's better than aborting now, since it is
1971 possible that we will, in fact, be able to form the needed
1972 group even with this allocation. */
1974 if (i >= FIRST_PSEUDO_REGISTER
1975 && (asm_noperands (max_needs[class] > 0
1976 ? max_needs_insn[class]
1977 : max_nongroups_insn[class])
1978 < 0))
1979 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1980 if (potential_reload_regs[i] >= 0
1981 && TEST_HARD_REG_BIT (reg_class_contents[class],
1982 potential_reload_regs[i]))
1983 break;
1985 /* I should be the index in potential_reload_regs
1986 of the new reload reg we have found. */
1988 if (i >= FIRST_PSEUDO_REGISTER)
1990 /* There are no possible registers left to spill. */
1991 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1992 : max_nongroups_insn[class]);
1993 failure = 1;
1994 goto failed;
1996 else
1997 something_changed
1998 |= new_spill_reg (i, class, max_needs, max_nongroups,
1999 global, dumpfile);
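 /* Illustrative sketch, not part of the original source: the selection
    made above, reduced to a plain array scan.  `prefs' is a hypothetical
    preference-ordered candidate list (-1 for used-up entries), `in_class'
    a 0/1 map for the class being satisfied, and `group_still_possible'
    stands in for possible_group_p:

	for (i = 0; i < n; i++)
	  if (prefs[i] >= 0 && in_class[prefs[i]]
	      && (nongroup_need == 0 || group_still_possible (prefs[i])))
	    break;

    i.e. take the most preferred register of the class, but when a
    non-group need is being filled, avoid one that would break up the last
    remaining chance of forming a group.  */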
2004 /* If global-alloc was run, notify it of any register eliminations we have
2005 done. */
2006 if (global)
2007 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2008 if (ep->can_eliminate)
2009 mark_elimination (ep->from, ep->to);
2011 /* Insert code to save and restore call-clobbered hard regs
 2012 around calls. Tell it what mode to use so that we will process
2013 those insns in reload_as_needed if we have to. */
2015 if (caller_save_needed)
2016 save_call_clobbered_regs (num_eliminable ? QImode
2017 : caller_save_spill_class != NO_REGS ? HImode
2018 : VOIDmode);
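 /* Illustrative sketch, not part of the original source: the machine mode
    passed above carries no size information; it is used as a three-way
    flag on the save/restore code so that reload_as_needed knows whether it
    must revisit those insns.  The choice reads:

	mode = num_eliminable ? QImode
	       : caller_save_spill_class != NO_REGS ? HImode
	       : VOIDmode;

    with QImode meaning eliminations are still pending, HImode that a spill
    register may be needed for the saves, and VOIDmode that no further
    processing is required.  */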
2020 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2021 If that insn didn't set the register (i.e., it copied the register to
2022 memory), just delete that insn instead of the equivalencing insn plus
2023 anything now dead. If we call delete_dead_insn on that insn, we may
 2024 delete the insn that actually sets the register if the register dies
2025 there and that is incorrect. */
2027 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2028 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2029 && GET_CODE (reg_equiv_init[i]) != NOTE)
2031 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2032 delete_dead_insn (reg_equiv_init[i]);
2033 else
2035 PUT_CODE (reg_equiv_init[i], NOTE);
2036 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2037 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2041 /* Use the reload registers where necessary
2042 by generating move instructions to move the must-be-register
2043 values into or out of the reload registers. */
2045 if (something_needs_reloads || something_needs_elimination
2046 || (caller_save_needed && num_eliminable)
2047 || caller_save_spill_class != NO_REGS)
2048 reload_as_needed (first, global);
2050 /* If we were able to eliminate the frame pointer, show that it is no
 2051 longer live at the start of any basic block. If it is live by
2052 virtue of being in a pseudo, that pseudo will be marked live
2053 and hence the frame pointer will be known to be live via that
2054 pseudo. */
2056 if (! frame_pointer_needed)
2057 for (i = 0; i < n_basic_blocks; i++)
2058 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2059 HARD_FRAME_POINTER_REGNUM);
2061 /* Come here (with failure set nonzero) if we can't get enough spill regs
2062 and we decide not to abort about it. */
2063 failed:
2065 reload_in_progress = 0;
2067 /* Now eliminate all pseudo regs by modifying them into
2068 their equivalent memory references.
2069 The REG-rtx's for the pseudos are modified in place,
2070 so all insns that used to refer to them now refer to memory.
2072 For a reg that has a reg_equiv_address, all those insns
2073 were changed by reloading so that no insns refer to it any longer;
2074 but the DECL_RTL of a variable decl may refer to it,
2075 and if so this causes the debugging info to mention the variable. */
2077 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2079 rtx addr = 0;
2080 int in_struct = 0;
2081 if (reg_equiv_mem[i])
2083 addr = XEXP (reg_equiv_mem[i], 0);
2084 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2086 if (reg_equiv_address[i])
2087 addr = reg_equiv_address[i];
2088 if (addr)
2090 if (reg_renumber[i] < 0)
2092 rtx reg = regno_reg_rtx[i];
2093 XEXP (reg, 0) = addr;
2094 REG_USERVAR_P (reg) = 0;
2095 MEM_IN_STRUCT_P (reg) = in_struct;
2096 PUT_CODE (reg, MEM);
2098 else if (reg_equiv_mem[i])
2099 XEXP (reg_equiv_mem[i], 0) = addr;
2103 /* Make a pass over all the insns and delete all USEs which we inserted
2104 only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
2105 is defined, also remove death notes for things that are no longer
2106 registers or no longer die in the insn (e.g., an input and output
2107 pseudo being tied). */
2109 for (insn = first; insn; insn = NEXT_INSN (insn))
2110 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2112 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2113 rtx note, next;
2114 #endif
2116 if (GET_CODE (PATTERN (insn)) == USE
2117 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
2119 PUT_CODE (insn, NOTE);
2120 NOTE_SOURCE_FILE (insn) = 0;
2121 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2122 continue;
2124 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2125 for (note = REG_NOTES (insn); note; note = next)
2127 next = XEXP (note, 1);
2128 if (REG_NOTE_KIND (note) == REG_DEAD
2129 && (GET_CODE (XEXP (note, 0)) != REG
2130 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2131 remove_note (insn, note);
2133 #endif
2136 /* If we are doing stack checking, give a warning if this function's
2137 frame size is larger than we expect. */
2138 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2140 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2142 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2143 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2144 size += UNITS_PER_WORD;
2146 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2147 warning ("frame size too large for reliable stack checking");
2150 /* Indicate that we no longer have known memory locations or constants. */
2151 reg_equiv_constant = 0;
2152 reg_equiv_memory_loc = 0;
2154 if (real_known_ptr)
2155 free (real_known_ptr);
2156 if (real_at_ptr)
2157 free (real_at_ptr);
2159 if (scratch_list)
2160 free (scratch_list);
2161 scratch_list = 0;
2162 if (scratch_block)
2163 free (scratch_block);
2164 scratch_block = 0;
2166 CLEAR_HARD_REG_SET (used_spill_regs);
2167 for (i = 0; i < n_spills; i++)
2168 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2170 return failure;
2173 /* Nonzero if, after spilling reg REGNO for non-groups,
2174 it will still be possible to find a group if we still need one. */
2176 static int
2177 possible_group_p (regno, max_groups)
2178 int regno;
2179 int *max_groups;
2181 int i;
2182 int class = (int) NO_REGS;
2184 for (i = 0; i < (int) N_REG_CLASSES; i++)
2185 if (max_groups[i] > 0)
2187 class = i;
2188 break;
2191 if (class == (int) NO_REGS)
2192 return 1;
2194 /* Consider each pair of consecutive registers. */
2195 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2197 /* Ignore pairs that include reg REGNO. */
2198 if (i == regno || i + 1 == regno)
2199 continue;
2201 /* Ignore pairs that are outside the class that needs the group.
2202 ??? Here we fail to handle the case where two different classes
2203 independently need groups. But this never happens with our
2204 current machine descriptions. */
2205 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2206 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2207 continue;
2209 /* A pair of consecutive regs we can still spill does the trick. */
2210 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2211 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2212 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2213 return 1;
2215 /* A pair of one already spilled and one we can spill does it
2216 provided the one already spilled is not otherwise reserved. */
2217 if (spill_reg_order[i] < 0
2218 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2219 && spill_reg_order[i + 1] >= 0
2220 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2221 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2222 return 1;
2223 if (spill_reg_order[i + 1] < 0
2224 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2225 && spill_reg_order[i] >= 0
2226 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2227 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2228 return 1;
2231 return 0;
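 /* Illustrative sketch, not part of the original reload1.c: a
    self-contained model of the pair test above.  IN_CLASS, SPILLED,
    RESERVED and BAD are hypothetical 0/1 maps indexed by hard register
    number and N is the number of hard registers; the register being
    spilled for the non-group need is assumed already removed from the
    maps.  A pair is still obtainable if both members can be spilled, or if
    one member is already a spill register that has not been reserved for
    some other group or non-group need.  */

static int
sketch_pair_still_possible (in_class, spilled, reserved, bad, n)
     char *in_class, *spilled, *reserved, *bad;
     int n;
{
  int i;

  for (i = 0; i < n - 1; i++)
    {
      if (! in_class[i] || ! in_class[i + 1])
	continue;
      /* Both registers are still free to be spilled.  */
      if (! spilled[i] && ! bad[i] && ! spilled[i + 1] && ! bad[i + 1])
	return 1;
      /* One is already spilled and unreserved, the other still spillable.  */
      if (spilled[i] && ! reserved[i] && ! spilled[i + 1] && ! bad[i + 1])
	return 1;
      if (spilled[i + 1] && ! reserved[i + 1] && ! spilled[i] && ! bad[i])
	return 1;
    }
  return 0;
}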
2234 /* Count any groups of CLASS that can be formed from the registers recently
2235 spilled. */
2237 static void
2238 count_possible_groups (group_size, group_mode, max_groups, class)
2239 int *group_size;
2240 enum machine_mode *group_mode;
2241 int *max_groups;
2242 int class;
2244 HARD_REG_SET new;
2245 int i, j;
2247 /* Now find all consecutive groups of spilled registers
2248 and mark each group off against the need for such groups.
2249 But don't count them against ordinary need, yet. */
2251 if (group_size[class] == 0)
2252 return;
2254 CLEAR_HARD_REG_SET (new);
2256 /* Make a mask of all the regs that are spill regs in class I. */
2257 for (i = 0; i < n_spills; i++)
2258 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2259 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2260 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2261 SET_HARD_REG_BIT (new, spill_regs[i]);
2263 /* Find each consecutive group of them. */
2264 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2265 if (TEST_HARD_REG_BIT (new, i)
2266 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2267 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2269 for (j = 1; j < group_size[class]; j++)
2270 if (! TEST_HARD_REG_BIT (new, i + j))
2271 break;
2273 if (j == group_size[class])
2275 /* We found a group. Mark it off against this class's need for
2276 groups, and against each superclass too. */
2277 register enum reg_class *p;
2279 max_groups[class]--;
2280 p = reg_class_superclasses[class];
2281 while (*p != LIM_REG_CLASSES)
2283 if (group_size [(int) *p] <= group_size [class])
2284 max_groups[(int) *p]--;
2285 p++;
2288 /* Don't count these registers again. */
2289 for (j = 0; j < group_size[class]; j++)
2290 SET_HARD_REG_BIT (counted_for_groups, i + j);
2293 /* Skip to the last reg in this group. When i is incremented above,
2294 it will then point to the first reg of the next possible group. */
2295 i += j - 1;
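 /* Illustrative sketch, not part of the original reload1.c: the
    run-detection idea above as a self-contained helper.  AVAIL is a
    hypothetical 0/1 map of spill registers not yet reserved, N the number
    of hard registers, SIZE the group size, and *NEEDED the remaining group
    need, decremented once per disjoint run found.  */

static void
sketch_count_runs (avail, n, size, needed)
     char *avail;
     int n, size, *needed;
{
  int i, j;

  for (i = 0; i + size <= n && *needed > 0; i++)
    {
      for (j = 0; j < size; j++)
	if (! avail[i + j])
	  break;
      if (j == size)
	{
	  (*needed)--;
	  /* Skip the rest of this run so its registers aren't reused.  */
	  i += size - 1;
	}
    }
}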
2299 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2300 another mode that needs to be reloaded for the same register class CLASS.
2301 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2302 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2304 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2305 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2306 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2307 causes unnecessary failures on machines requiring alignment of register
2308 groups when the two modes are different sizes, because the larger mode has
2309 more strict alignment rules than the smaller mode. */
2311 static int
2312 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2313 enum machine_mode allocate_mode, other_mode;
2314 enum reg_class class;
2316 register int regno;
2317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2319 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2320 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2321 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2322 return 0;
2324 return 1;
2327 /* Handle the failure to find a register to spill.
2328 INSN should be one of the insns which needed this particular spill reg. */
2330 static void
2331 spill_failure (insn)
2332 rtx insn;
2334 if (asm_noperands (PATTERN (insn)) >= 0)
2335 error_for_asm (insn, "`asm' needs too many reloads");
2336 else
2337 fatal_insn ("Unable to find a register to spill.", insn);
2340 /* Add a new register to the tables of available spill-registers
2341 (as well as spilling all pseudos allocated to the register).
2342 I is the index of this register in potential_reload_regs.
2343 CLASS is the regclass whose need is being satisfied.
2344 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2345 so that this register can count off against them.
2346 MAX_NONGROUPS is 0 if this register is part of a group.
2347 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2349 static int
2350 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2351 int i;
2352 int class;
2353 int *max_needs;
2354 int *max_nongroups;
2355 int global;
2356 FILE *dumpfile;
2358 register enum reg_class *p;
2359 int val;
2360 int regno = potential_reload_regs[i];
2362 if (i >= FIRST_PSEUDO_REGISTER)
2363 abort (); /* Caller failed to find any register. */
2365 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2367 static char *reg_class_names[] = REG_CLASS_NAMES;
2368 fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
2369 This may be due to a compiler bug or to impossible asm\n\
2370 statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
2373 /* Make reg REGNO an additional reload reg. */
2375 potential_reload_regs[i] = -1;
2376 spill_regs[n_spills] = regno;
2377 spill_reg_order[regno] = n_spills;
2378 if (dumpfile)
2379 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2381 /* Clear off the needs we just satisfied. */
2383 max_needs[class]--;
2384 p = reg_class_superclasses[class];
2385 while (*p != LIM_REG_CLASSES)
2386 max_needs[(int) *p++]--;
2388 if (max_nongroups && max_nongroups[class] > 0)
2390 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2391 max_nongroups[class]--;
2392 p = reg_class_superclasses[class];
2393 while (*p != LIM_REG_CLASSES)
2394 max_nongroups[(int) *p++]--;
2397 /* Spill every pseudo reg that was allocated to this reg
2398 or to something that overlaps this reg. */
2400 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2402 /* If there are some registers still to eliminate and this register
2403 wasn't ever used before, additional stack space may have to be
2404 allocated to store this register. Thus, we may have changed the offset
2405 between the stack and frame pointers, so mark that something has changed.
2406 (If new pseudos were spilled, thus requiring more space, VAL would have
2407 been set non-zero by the call to spill_hard_reg above since additional
 2408 reloads may be needed in that case.)
2410 One might think that we need only set VAL to 1 if this is a call-used
2411 register. However, the set of registers that must be saved by the
2412 prologue is not identical to the call-used set. For example, the
2413 register used by the call insn for the return PC is a call-used register,
2414 but must be saved by the prologue. */
2415 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2416 val = 1;
2418 regs_ever_live[spill_regs[n_spills]] = 1;
2419 n_spills++;
2421 return val;
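 /* Illustrative sketch, not part of the original reload1.c: the
    bookkeeping pattern above, isolated.  Recording a new spill register
    means removing it from the preference list, appending it to the spill
    list, and noting its position so that "is this a spill reg, and which
    one?" is a constant-time test.  All parameter names here are
    hypothetical stand-ins for the file-scope tables.  */

static void
sketch_record_spill (regno, prefs, pref_index, order, list, count)
     int regno, *prefs, pref_index, *order, *list, *count;
{
  prefs[pref_index] = -1;	/* No longer merely a candidate.  */
  list[*count] = regno;		/* Append to the spill list.  */
  order[regno] = *count;	/* Record its index; nonmembers stay -1.  */
  (*count)++;
}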
 2424 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2425 data that is dead in INSN. */
2427 static void
2428 delete_dead_insn (insn)
2429 rtx insn;
2431 rtx prev = prev_real_insn (insn);
2432 rtx prev_dest;
2434 /* If the previous insn sets a register that dies in our insn, delete it
2435 too. */
2436 if (prev && GET_CODE (PATTERN (prev)) == SET
2437 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2438 && reg_mentioned_p (prev_dest, PATTERN (insn))
2439 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2440 delete_dead_insn (prev);
2442 PUT_CODE (insn, NOTE);
2443 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2444 NOTE_SOURCE_FILE (insn) = 0;
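 /* Illustrative sketch, not part of the original reload1.c: the recursion
    above on a toy data structure.  Each node sets at most one "register"
    and records at most one register that dies at it; deleting a node also
    deletes its predecessor when that predecessor exists only to set the
    register dying here.  */

struct sketch_insn
{
  struct sketch_insn *prev;
  int sets_reg;			/* Register this node sets, or -1.  */
  int dead_reg;			/* Register that dies here, or -1.  */
  int deleted;
};

static void
sketch_delete_dead (node)
     struct sketch_insn *node;
{
  if (node->prev
      && node->prev->sets_reg >= 0
      && node->prev->sets_reg == node->dead_reg)
    sketch_delete_dead (node->prev);
  node->deleted = 1;
}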
2447 /* Modify the home of pseudo-reg I.
2448 The new home is present in reg_renumber[I].
2450 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2451 or it may be -1, meaning there is none or it is not relevant.
2452 This is used so that all pseudos spilled from a given hard reg
2453 can share one stack slot. */
2455 static void
2456 alter_reg (i, from_reg)
2457 register int i;
2458 int from_reg;
2460 /* When outputting an inline function, this can happen
2461 for a reg that isn't actually used. */
2462 if (regno_reg_rtx[i] == 0)
2463 return;
2465 /* If the reg got changed to a MEM at rtl-generation time,
2466 ignore it. */
2467 if (GET_CODE (regno_reg_rtx[i]) != REG)
2468 return;
2470 /* Modify the reg-rtx to contain the new hard reg
2471 number or else to contain its pseudo reg number. */
2472 REGNO (regno_reg_rtx[i])
2473 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2475 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2476 allocate a stack slot for it. */
2478 if (reg_renumber[i] < 0
2479 && REG_N_REFS (i) > 0
2480 && reg_equiv_constant[i] == 0
2481 && reg_equiv_memory_loc[i] == 0)
2483 register rtx x;
2484 int inherent_size = PSEUDO_REGNO_BYTES (i);
2485 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2486 int adjust = 0;
2488 /* Each pseudo reg has an inherent size which comes from its own mode,
2489 and a total size which provides room for paradoxical subregs
2490 which refer to the pseudo reg in wider modes.
2492 We can use a slot already allocated if it provides both
2493 enough inherent space and enough total space.
2494 Otherwise, we allocate a new slot, making sure that it has no less
 2495 inherent space, and no less total space, than the previous slot. */
2496 if (from_reg == -1)
2498 /* No known place to spill from => no slot to reuse. */
2499 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2500 inherent_size == total_size ? 0 : -1);
2501 if (BYTES_BIG_ENDIAN)
2502 /* Cancel the big-endian correction done in assign_stack_local.
2503 Get the address of the beginning of the slot.
2504 This is so we can do a big-endian correction unconditionally
2505 below. */
2506 adjust = inherent_size - total_size;
2508 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2510 /* Reuse a stack slot if possible. */
2511 else if (spill_stack_slot[from_reg] != 0
2512 && spill_stack_slot_width[from_reg] >= total_size
2513 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2514 >= inherent_size))
2515 x = spill_stack_slot[from_reg];
2516 /* Allocate a bigger slot. */
2517 else
2519 /* Compute maximum size needed, both for inherent size
2520 and for total size. */
2521 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2522 rtx stack_slot;
2523 if (spill_stack_slot[from_reg])
2525 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2526 > inherent_size)
2527 mode = GET_MODE (spill_stack_slot[from_reg]);
2528 if (spill_stack_slot_width[from_reg] > total_size)
2529 total_size = spill_stack_slot_width[from_reg];
2531 /* Make a slot with that size. */
2532 x = assign_stack_local (mode, total_size,
2533 inherent_size == total_size ? 0 : -1);
2534 stack_slot = x;
2535 if (BYTES_BIG_ENDIAN)
2537 /* Cancel the big-endian correction done in assign_stack_local.
2538 Get the address of the beginning of the slot.
2539 This is so we can do a big-endian correction unconditionally
2540 below. */
2541 adjust = GET_MODE_SIZE (mode) - total_size;
2542 if (adjust)
2543 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2544 * BITS_PER_UNIT,
2545 MODE_INT, 1),
2546 plus_constant (XEXP (x, 0), adjust));
2548 spill_stack_slot[from_reg] = stack_slot;
2549 spill_stack_slot_width[from_reg] = total_size;
2552 /* On a big endian machine, the "address" of the slot
2553 is the address of the low part that fits its inherent mode. */
2554 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2555 adjust += (total_size - inherent_size);
2557 /* If we have any adjustment to make, or if the stack slot is the
2558 wrong mode, make a new stack slot. */
2559 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2561 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
2562 plus_constant (XEXP (x, 0), adjust));
2563 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2566 /* Save the stack slot for later. */
2567 reg_equiv_memory_loc[i] = x;
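 /* Illustrative sketch, not part of the original reload1.c: the endianness
    adjustment above, isolated (the real code must also cancel a correction
    already applied by assign_stack_local, which this ignores).  On a
    big-endian target the low-order INHERENT_SIZE bytes of a wider
    SLOT_SIZE-byte slot sit at the end of the slot, so the usable address
    is that many bytes before the slot's end; on a little-endian target it
    is simply the slot's start.  */

static int
sketch_slot_offset (slot_size, inherent_size, big_endian)
     int slot_size, inherent_size, big_endian;
{
  if (big_endian && inherent_size < slot_size)
    return slot_size - inherent_size;
  return 0;
}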
2571 /* Mark the slots in regs_ever_live for the hard regs
2572 used by pseudo-reg number REGNO. */
2574 void
2575 mark_home_live (regno)
2576 int regno;
2578 register int i, lim;
2579 i = reg_renumber[regno];
2580 if (i < 0)
2581 return;
2582 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2583 while (i < lim)
2584 regs_ever_live[i++] = 1;
2587 /* Mark the registers used in SCRATCH as being live. */
2589 static void
2590 mark_scratch_live (scratch)
2591 rtx scratch;
2593 register int i;
2594 int regno = REGNO (scratch);
2595 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2597 for (i = regno; i < lim; i++)
2598 regs_ever_live[i] = 1;
2601 /* This function handles the tracking of elimination offsets around branches.
2603 X is a piece of RTL being scanned.
2605 INSN is the insn that it came from, if any.
2607 INITIAL_P is non-zero if we are to set the offset to be the initial
2608 offset and zero if we are setting the offset of the label to be the
2609 current offset. */
2611 static void
2612 set_label_offsets (x, insn, initial_p)
2613 rtx x;
2614 rtx insn;
2615 int initial_p;
2617 enum rtx_code code = GET_CODE (x);
2618 rtx tem;
2619 int i;
2620 struct elim_table *p;
2622 switch (code)
2624 case LABEL_REF:
2625 if (LABEL_REF_NONLOCAL_P (x))
2626 return;
2628 x = XEXP (x, 0);
2630 /* ... fall through ... */
2632 case CODE_LABEL:
2633 /* If we know nothing about this label, set the desired offsets. Note
2634 that this sets the offset at a label to be the offset before a label
2635 if we don't know anything about the label. This is not correct for
2636 the label after a BARRIER, but is the best guess we can make. If
2637 we guessed wrong, we will suppress an elimination that might have
2638 been possible had we been able to guess correctly. */
2640 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2642 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2643 offsets_at[CODE_LABEL_NUMBER (x)][i]
2644 = (initial_p ? reg_eliminate[i].initial_offset
2645 : reg_eliminate[i].offset);
2646 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2649 /* Otherwise, if this is the definition of a label and it is
2650 preceded by a BARRIER, set our offsets to the known offset of
2651 that label. */
2653 else if (x == insn
2654 && (tem = prev_nonnote_insn (insn)) != 0
2655 && GET_CODE (tem) == BARRIER)
2657 num_not_at_initial_offset = 0;
2658 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2660 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2661 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2662 if (reg_eliminate[i].can_eliminate
2663 && (reg_eliminate[i].offset
2664 != reg_eliminate[i].initial_offset))
2665 num_not_at_initial_offset++;
2669 else
2670 /* If neither of the above cases is true, compare each offset
2671 with those previously recorded and suppress any eliminations
2672 where the offsets disagree. */
2674 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2675 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2676 != (initial_p ? reg_eliminate[i].initial_offset
2677 : reg_eliminate[i].offset))
2678 reg_eliminate[i].can_eliminate = 0;
2680 return;
2682 case JUMP_INSN:
2683 set_label_offsets (PATTERN (insn), insn, initial_p);
2685 /* ... fall through ... */
2687 case INSN:
2688 case CALL_INSN:
2689 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2690 and hence must have all eliminations at their initial offsets. */
2691 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2692 if (REG_NOTE_KIND (tem) == REG_LABEL)
2693 set_label_offsets (XEXP (tem, 0), insn, 1);
2694 return;
2696 case ADDR_VEC:
2697 case ADDR_DIFF_VEC:
2698 /* Each of the labels in the address vector must be at their initial
2699 offsets. We want the first field for ADDR_VEC and the second
2700 field for ADDR_DIFF_VEC. */
2702 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2703 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2704 insn, initial_p);
2705 return;
2707 case SET:
2708 /* We only care about setting PC. If the source is not RETURN,
2709 IF_THEN_ELSE, or a label, disable any eliminations not at
2710 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2711 isn't one of those possibilities. For branches to a label,
2712 call ourselves recursively.
2714 Note that this can disable elimination unnecessarily when we have
2715 a non-local goto since it will look like a non-constant jump to
2716 someplace in the current function. This isn't a significant
2717 problem since such jumps will normally be when all elimination
2718 pairs are back to their initial offsets. */
2720 if (SET_DEST (x) != pc_rtx)
2721 return;
2723 switch (GET_CODE (SET_SRC (x)))
2725 case PC:
2726 case RETURN:
2727 return;
2729 case LABEL_REF:
2730 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2731 return;
2733 case IF_THEN_ELSE:
2734 tem = XEXP (SET_SRC (x), 1);
2735 if (GET_CODE (tem) == LABEL_REF)
2736 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2737 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2738 break;
2740 tem = XEXP (SET_SRC (x), 2);
2741 if (GET_CODE (tem) == LABEL_REF)
2742 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2743 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2744 break;
2745 return;
2747 default:
2748 break;
2751 /* If we reach here, all eliminations must be at their initial
2752 offset because we are doing a jump to a variable address. */
2753 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2754 if (p->offset != p->initial_offset)
2755 p->can_eliminate = 0;
2756 break;
2758 default:
2759 break;
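 /* Illustrative sketch, not part of the original reload1.c: the label
    bookkeeping above for a single elimination, self-contained.  KNOWN and
    OFFSET_AT are hypothetical per-label tables; the first time a label is
    reached its offset is recorded, and a later arrival with a different
    offset disables the elimination, since the replacement register cannot
    hold two offsets at once.  */

static void
sketch_note_label_offset (label, cur_offset, known, offset_at, can_eliminate)
     int label, cur_offset;
     char *known;
     int *offset_at, *can_eliminate;
{
  if (! known[label])
    {
      offset_at[label] = cur_offset;
      known[label] = 1;
    }
  else if (offset_at[label] != cur_offset)
    *can_eliminate = 0;
}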
 2763 /* Used for communication between the next two functions to properly share
2764 the vector for an ASM_OPERANDS. */
2766 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2768 /* Scan X and replace any eliminable registers (such as fp) with a
2769 replacement (such as sp), plus an offset.
2771 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2772 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2773 MEM, we are allowed to replace a sum of a register and the constant zero
2774 with the register, which we cannot do outside a MEM. In addition, we need
2775 to record the fact that a register is referenced outside a MEM.
2777 If INSN is an insn, it is the insn containing X. If we replace a REG
2778 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2779 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2780 the REG is being modified.
2782 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2783 That's used when we eliminate in expressions stored in notes.
2784 This means, do not set ref_outside_mem even if the reference
2785 is outside of MEMs.
2787 If we see a modification to a register we know about, take the
2788 appropriate action (see case SET, below).
 2790 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2791 replacements done assuming all offsets are at their initial values. If
2792 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2793 encounter, return the actual location so that find_reloads will do
2794 the proper thing. */
2797 eliminate_regs (x, mem_mode, insn)
2798 rtx x;
2799 enum machine_mode mem_mode;
2800 rtx insn;
2802 enum rtx_code code = GET_CODE (x);
2803 struct elim_table *ep;
2804 int regno;
2805 rtx new;
2806 int i, j;
2807 char *fmt;
2808 int copied = 0;
2810 switch (code)
2812 case CONST_INT:
2813 case CONST_DOUBLE:
2814 case CONST:
2815 case SYMBOL_REF:
2816 case CODE_LABEL:
2817 case PC:
2818 case CC0:
2819 case ASM_INPUT:
2820 case ADDR_VEC:
2821 case ADDR_DIFF_VEC:
2822 case RETURN:
2823 return x;
2825 case ADDRESSOF:
2826 /* This is only for the benefit of the debugging backends, which call
2827 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2828 removed after CSE. */
2829 new = eliminate_regs (XEXP (x, 0), 0, insn);
2830 if (GET_CODE (new) == MEM)
2831 return XEXP (new, 0);
2832 return x;
2834 case REG:
2835 regno = REGNO (x);
2837 /* First handle the case where we encounter a bare register that
2838 is eliminable. Replace it with a PLUS. */
2839 if (regno < FIRST_PSEUDO_REGISTER)
2841 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2842 ep++)
2843 if (ep->from_rtx == x && ep->can_eliminate)
2845 if (! mem_mode
2846 /* Refs inside notes don't count for this purpose. */
2847 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2848 || GET_CODE (insn) == INSN_LIST)))
2849 ep->ref_outside_mem = 1;
2850 return plus_constant (ep->to_rtx, ep->previous_offset);
2854 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2855 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2857 /* In this case, find_reloads would attempt to either use an
2858 incorrect address (if something is not at its initial offset)
 2859 or substitute a replaced address into an insn (which loses
2860 if the offset is changed by some later action). So we simply
2861 return the replaced stack slot (assuming it is changed by
2862 elimination) and ignore the fact that this is actually a
2863 reference to the pseudo. Ensure we make a copy of the
2864 address in case it is shared. */
2865 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, insn);
2866 if (new != reg_equiv_memory_loc[regno])
2868 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2869 && GET_CODE (insn) != INSN_LIST)
2870 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn))
2871 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
2872 return copy_rtx (new);
2875 return x;
2877 case PLUS:
2878 /* If this is the sum of an eliminable register and a constant, rework
2879 the sum. */
2880 if (GET_CODE (XEXP (x, 0)) == REG
2881 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2882 && CONSTANT_P (XEXP (x, 1)))
2884 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2885 ep++)
2886 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2888 if (! mem_mode
2889 /* Refs inside notes don't count for this purpose. */
2890 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2891 || GET_CODE (insn) == INSN_LIST)))
2892 ep->ref_outside_mem = 1;
2894 /* The only time we want to replace a PLUS with a REG (this
2895 occurs when the constant operand of the PLUS is the negative
2896 of the offset) is when we are inside a MEM. We won't want
2897 to do so at other times because that would change the
2898 structure of the insn in a way that reload can't handle.
2899 We special-case the commonest situation in
2900 eliminate_regs_in_insn, so just replace a PLUS with a
2901 PLUS here, unless inside a MEM. */
2902 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2903 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2904 return ep->to_rtx;
2905 else
2906 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2907 plus_constant (XEXP (x, 1),
2908 ep->previous_offset));
2911 /* If the register is not eliminable, we are done since the other
2912 operand is a constant. */
2913 return x;
2916 /* If this is part of an address, we want to bring any constant to the
2917 outermost PLUS. We will do this by doing register replacement in
2918 our operands and seeing if a constant shows up in one of them.
2920 We assume here this is part of an address (or a "load address" insn)
2921 since an eliminable register is not likely to appear in any other
2922 context.
2924 If we have (plus (eliminable) (reg)), we want to produce
 2925 (plus (plus (replacement) (reg)) (const)). If this was part of a
2926 normal add insn, (plus (replacement) (reg)) will be pushed as a
2927 reload. This is the desired action. */
2930 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2931 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2933 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2935 /* If one side is a PLUS and the other side is a pseudo that
2936 didn't get a hard register but has a reg_equiv_constant,
2937 we must replace the constant here since it may no longer
2938 be in the position of any operand. */
2939 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2940 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2941 && reg_renumber[REGNO (new1)] < 0
2942 && reg_equiv_constant != 0
2943 && reg_equiv_constant[REGNO (new1)] != 0)
2944 new1 = reg_equiv_constant[REGNO (new1)];
2945 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2946 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2947 && reg_renumber[REGNO (new0)] < 0
2948 && reg_equiv_constant[REGNO (new0)] != 0)
2949 new0 = reg_equiv_constant[REGNO (new0)];
2951 new = form_sum (new0, new1);
2953 /* As above, if we are not inside a MEM we do not want to
2954 turn a PLUS into something else. We might try to do so here
2955 for an addition of 0 if we aren't optimizing. */
2956 if (! mem_mode && GET_CODE (new) != PLUS)
2957 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2958 else
2959 return new;
2962 return x;
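 /* Illustrative sketch, not part of the original source: the PLUS rework
    above on concrete RTL.  With fp being eliminated in favor of sp at a
    previous offset of 16, and inside a MEM:

	(plus (reg fp) (const_int 8))    ==>  (plus (reg sp) (const_int 24))
	(plus (reg fp) (const_int -16))  ==>  (reg sp)

    The bare-register form is produced only inside a MEM; elsewhere it
    would change the shape of the insn in a way the rest of reload does not
    expect.  */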
2964 case MULT:
2965 /* If this is the product of an eliminable register and a
2966 constant, apply the distribute law and move the constant out
2967 so that we have (plus (mult ..) ..). This is needed in order
2968 to keep load-address insns valid. This case is pathological.
2969 We ignore the possibility of overflow here. */
2970 if (GET_CODE (XEXP (x, 0)) == REG
2971 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2972 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2973 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2974 ep++)
2975 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2977 if (! mem_mode
2978 /* Refs inside notes don't count for this purpose. */
2979 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2980 || GET_CODE (insn) == INSN_LIST)))
2981 ep->ref_outside_mem = 1;
2983 return
2984 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2985 ep->previous_offset * INTVAL (XEXP (x, 1)));
2988 /* ... fall through ... */
2990 case CALL:
2991 case COMPARE:
2992 case MINUS:
2993 case DIV: case UDIV:
2994 case MOD: case UMOD:
2995 case AND: case IOR: case XOR:
2996 case ROTATERT: case ROTATE:
2997 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2998 case NE: case EQ:
2999 case GE: case GT: case GEU: case GTU:
3000 case LE: case LT: case LEU: case LTU:
3002 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3003 rtx new1
3004 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
3006 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3007 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
3009 return x;
3011 case EXPR_LIST:
3012 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3013 if (XEXP (x, 0))
3015 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3016 if (new != XEXP (x, 0))
3017 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
3020 /* ... fall through ... */
3022 case INSN_LIST:
3023 /* Now do eliminations in the rest of the chain. If this was
3024 an EXPR_LIST, this might result in allocating more memory than is
3025 strictly needed, but it simplifies the code. */
3026 if (XEXP (x, 1))
3028 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
3029 if (new != XEXP (x, 1))
3030 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3032 return x;
3034 case PRE_INC:
3035 case POST_INC:
3036 case PRE_DEC:
3037 case POST_DEC:
3038 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3039 if (ep->to_rtx == XEXP (x, 0))
3041 int size = GET_MODE_SIZE (mem_mode);
3043 /* If more bytes than MEM_MODE are pushed, account for them. */
3044 #ifdef PUSH_ROUNDING
3045 if (ep->to_rtx == stack_pointer_rtx)
3046 size = PUSH_ROUNDING (size);
3047 #endif
3048 if (code == PRE_DEC || code == POST_DEC)
3049 ep->offset += size;
3050 else
3051 ep->offset -= size;
3054 /* Fall through to generic unary operation case. */
3055 case STRICT_LOW_PART:
3056 case NEG: case NOT:
3057 case SIGN_EXTEND: case ZERO_EXTEND:
3058 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3059 case FLOAT: case FIX:
3060 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3061 case ABS:
3062 case SQRT:
3063 case FFS:
3064 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3065 if (new != XEXP (x, 0))
3066 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3067 return x;
3069 case SUBREG:
3070 /* Similar to above processing, but preserve SUBREG_WORD.
3071 Convert (subreg (mem)) to (mem) if not paradoxical.
3072 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3073 pseudo didn't get a hard reg, we must replace this with the
3074 eliminated version of the memory location because push_reloads
3075 may do the replacement in certain circumstances. */
3076 if (GET_CODE (SUBREG_REG (x)) == REG
3077 && (GET_MODE_SIZE (GET_MODE (x))
3078 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3079 && reg_equiv_memory_loc != 0
3080 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3082 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3083 mem_mode, insn);
3085 /* If we didn't change anything, we must retain the pseudo. */
3086 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3087 new = SUBREG_REG (x);
3088 else
3090 /* In this case, we must show that the pseudo is used in this
3091 insn so that delete_output_reload will do the right thing. */
3092 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3093 && GET_CODE (insn) != INSN_LIST)
3094 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
3095 SUBREG_REG (x)),
3096 insn))
3097 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
3099 /* Ensure NEW isn't shared in case we have to reload it. */
3100 new = copy_rtx (new);
3103 else
3104 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
3106 if (new != XEXP (x, 0))
3108 int x_size = GET_MODE_SIZE (GET_MODE (x));
3109 int new_size = GET_MODE_SIZE (GET_MODE (new));
3111 if (GET_CODE (new) == MEM
3112 && ((x_size < new_size
3113 #ifdef WORD_REGISTER_OPERATIONS
3114 /* On these machines, combine can create rtl of the form
3115 (set (subreg:m1 (reg:m2 R) 0) ...)
3116 where m1 < m2, and expects something interesting to
3117 happen to the entire word. Moreover, it will use the
3118 (reg:m2 R) later, expecting all bits to be preserved.
3119 So if the number of words is the same, preserve the
3120 subreg so that push_reloads can see it. */
3121 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
3122 #endif
3124 || (x_size == new_size))
3127 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3128 enum machine_mode mode = GET_MODE (x);
3130 if (BYTES_BIG_ENDIAN)
3131 offset += (MIN (UNITS_PER_WORD,
3132 GET_MODE_SIZE (GET_MODE (new)))
3133 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3135 PUT_MODE (new, mode);
3136 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3137 return new;
3139 else
3140 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
3143 return x;
3145 case USE:
3146 /* If using a register that is the source of an eliminate we still
3147 think can be performed, note it cannot be performed since we don't
3148 know how this register is used. */
3149 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3150 if (ep->from_rtx == XEXP (x, 0))
3151 ep->can_eliminate = 0;
3153 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3154 if (new != XEXP (x, 0))
3155 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3156 return x;
3158 case CLOBBER:
3159 /* If clobbering a register that is the replacement register for an
3160 elimination we still think can be performed, note that it cannot
3161 be performed. Otherwise, we need not be concerned about it. */
3162 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3163 if (ep->to_rtx == XEXP (x, 0))
3164 ep->can_eliminate = 0;
3166 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3167 if (new != XEXP (x, 0))
3168 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3169 return x;
3171 case ASM_OPERANDS:
3173 rtx *temp_vec;
3174 /* Properly handle sharing input and constraint vectors. */
3175 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3177 /* When we come to a new vector not seen before,
3178 scan all its elements; keep the old vector if none
3179 of them changes; otherwise, make a copy. */
3180 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3181 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3182 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3183 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3184 mem_mode, insn);
3186 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3187 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3188 break;
3190 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3191 new_asm_operands_vec = old_asm_operands_vec;
3192 else
3193 new_asm_operands_vec
3194 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3197 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3198 if (new_asm_operands_vec == old_asm_operands_vec)
3199 return x;
3201 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3202 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3203 ASM_OPERANDS_OUTPUT_IDX (x),
3204 new_asm_operands_vec,
3205 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3206 ASM_OPERANDS_SOURCE_FILE (x),
3207 ASM_OPERANDS_SOURCE_LINE (x));
3208 new->volatil = x->volatil;
3209 return new;
3212 case SET:
3213 /* Check for setting a register that we know about. */
3214 if (GET_CODE (SET_DEST (x)) == REG)
3216 /* See if this is setting the replacement register for an
3217 elimination.
3219 If DEST is the hard frame pointer, we do nothing because we
3220 assume that all assignments to the frame pointer are for
3221 non-local gotos and are being done at a time when they are valid
3222 and do not disturb anything else. Some machines want to
3223 eliminate a fake argument pointer (or even a fake frame pointer)
3224 with either the real frame or the stack pointer. Assignments to
3225 the hard frame pointer must not prevent this elimination. */
3227 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3228 ep++)
3229 if (ep->to_rtx == SET_DEST (x)
3230 && SET_DEST (x) != hard_frame_pointer_rtx)
3232 /* If it is being incremented, adjust the offset. Otherwise,
3233 this elimination can't be done. */
3234 rtx src = SET_SRC (x);
3236 if (GET_CODE (src) == PLUS
3237 && XEXP (src, 0) == SET_DEST (x)
3238 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3239 ep->offset -= INTVAL (XEXP (src, 1));
3240 else
3241 ep->can_eliminate = 0;
3244 /* Now check to see we are assigning to a register that can be
3245 eliminated. If so, it must be as part of a PARALLEL, since we
3246 will not have been called if this is a single SET. So indicate
3247 that we can no longer eliminate this reg. */
3248 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3249 ep++)
3250 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3251 ep->can_eliminate = 0;
3254 /* Now avoid the loop below in this common case. */
3256 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3257 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3259 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3260 write a CLOBBER insn. */
3261 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3262 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3263 && GET_CODE (insn) != INSN_LIST)
3264 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
3266 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3267 return gen_rtx_SET (VOIDmode, new0, new1);
3270 return x;
3272 case MEM:
3273 /* This is only for the benefit of the debugging backends, which call
3274 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3275 removed after CSE. */
3276 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3277 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
3279 /* Our only special processing is to pass the mode of the MEM to our
3280 recursive call and copy the flags. While we are here, handle this
3281 case more efficiently. */
3282 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3283 if (new != XEXP (x, 0))
3285 new = gen_rtx_MEM (GET_MODE (x), new);
3286 new->volatil = x->volatil;
3287 new->unchanging = x->unchanging;
3288 new->in_struct = x->in_struct;
3289 return new;
3291 else
3292 return x;
3294 default:
3295 break;
3298 /* Process each of our operands recursively. If any have changed, make a
3299 copy of the rtx. */
3300 fmt = GET_RTX_FORMAT (code);
3301 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3303 if (*fmt == 'e')
3305 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3306 if (new != XEXP (x, i) && ! copied)
3308 rtx new_x = rtx_alloc (code);
3309 bcopy ((char *) x, (char *) new_x,
3310 (sizeof (*new_x) - sizeof (new_x->fld)
3311 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3312 x = new_x;
3313 copied = 1;
3315 XEXP (x, i) = new;
3317 else if (*fmt == 'E')
3319 int copied_vec = 0;
3320 for (j = 0; j < XVECLEN (x, i); j++)
3322 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3323 if (new != XVECEXP (x, i, j) && ! copied_vec)
3325 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3326 XVEC (x, i)->elem);
3327 if (! copied)
3329 rtx new_x = rtx_alloc (code);
3330 bcopy ((char *) x, (char *) new_x,
3331 (sizeof (*new_x) - sizeof (new_x->fld)
3332 + (sizeof (new_x->fld[0])
3333 * GET_RTX_LENGTH (code))));
3334 x = new_x;
3335 copied = 1;
3337 XVEC (x, i) = new_v;
3338 copied_vec = 1;
3340 XVECEXP (x, i, j) = new;
3345 return x;
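/* Editorial note -- a hedged, standalone sketch (not part of this file) of
   the copy-on-write traversal used in the default case above: walk each
   operand as directed by a per-code format string and allocate a fresh
   node only once the first operand actually changes, so unchanged subtrees
   stay shared.  All names below are illustrative, not GCC interfaces.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct node
{
  const char *fmt;		/* one char per operand; 'e' = sub-node */
  struct node *op[4];
  int value;			/* leaf payload */
};

/* Return X with every leaf equal to FROM rewritten to TO, sharing every
   subtree that did not change.  Returning the same pointer means
   "nothing changed", just as eliminate_regs returns X itself.  */
static struct node *
rewrite (struct node *x, int from, int to)
{
  int i, copied = 0;

  if (x->fmt[0] == '\0')
    {
      struct node *leaf;

      if (x->value != from)
	return x;
      leaf = (struct node *) malloc (sizeof *leaf);
      *leaf = *x;
      leaf->value = to;
      return leaf;
    }

  for (i = 0; x->fmt[i] != '\0'; i++)
    if (x->fmt[i] == 'e')
      {
	struct node *new = rewrite (x->op[i], from, to);
	if (new != x->op[i] && ! copied)
	  {
	    /* First changed operand: copy the node lazily.  */
	    struct node *new_x = (struct node *) malloc (sizeof *new_x);
	    memcpy (new_x, x, sizeof *x);
	    x = new_x;
	    copied = 1;
	  }
	x->op[i] = new;
      }
  return x;
}
#endif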
3348 /* Scan INSN and eliminate all eliminable registers in it.
3350 If REPLACE is nonzero, do the replacement destructively. Also
3351 delete the insn as dead if it is setting an eliminable register.
3353 If REPLACE is zero, do all our allocations in reload_obstack.
3355 If no eliminations were done and this insn doesn't require any elimination
3356 processing (these are not identical conditions: it might be updating sp,
3357 but not referencing fp; this needs to be seen during reload_as_needed so
3358 that the offset between fp and sp can be taken into consideration), zero
3359 is returned. Otherwise, 1 is returned. */
3361 static int
3362 eliminate_regs_in_insn (insn, replace)
3363 rtx insn;
3364 int replace;
3366 rtx old_body = PATTERN (insn);
3367 rtx old_set = single_set (insn);
3368 rtx new_body;
3369 int val = 0;
3370 struct elim_table *ep;
3372 if (! replace)
3373 push_obstacks (&reload_obstack, &reload_obstack);
3375 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3376 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3378 /* Check for setting an eliminable register. */
3379 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3380 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3382 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3383 /* If this is setting the frame pointer register to the
3384 hardware frame pointer register and this is an elimination
3385 that will be done (tested above), this insn is really
3386 adjusting the frame pointer downward to compensate for
3387 the adjustment done before a nonlocal goto. */
3388 if (ep->from == FRAME_POINTER_REGNUM
3389 && ep->to == HARD_FRAME_POINTER_REGNUM)
3391 rtx src = SET_SRC (old_set);
3392 int offset, ok = 0;
3393 rtx prev_insn, prev_set;
3395 if (src == ep->to_rtx)
3396 offset = 0, ok = 1;
3397 else if (GET_CODE (src) == PLUS
3398 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3399 offset = INTVAL (XEXP (src, 0)), ok = 1;
3400 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3401 && (prev_set = single_set (prev_insn)) != 0
3402 && rtx_equal_p (SET_DEST (prev_set), src))
3404 src = SET_SRC (prev_set);
3405 if (src == ep->to_rtx)
3406 offset = 0, ok = 1;
3407 else if (GET_CODE (src) == PLUS
3408 && GET_CODE (XEXP (src, 0)) == CONST_INT
3409 && XEXP (src, 1) == ep->to_rtx)
3410 offset = INTVAL (XEXP (src, 0)), ok = 1;
3411 else if (GET_CODE (src) == PLUS
3412 && GET_CODE (XEXP (src, 1)) == CONST_INT
3413 && XEXP (src, 0) == ep->to_rtx)
3414 offset = INTVAL (XEXP (src, 1)), ok = 1;
3417 if (ok)
3419 if (replace)
3421 rtx src
3422 = plus_constant (ep->to_rtx, offset - ep->offset);
3424 /* First see if this insn remains valid when we
3425 make the change. If not, keep the INSN_CODE
3426 the same and let reload fix it up. */
3427 validate_change (insn, &SET_SRC (old_set), src, 1);
3428 validate_change (insn, &SET_DEST (old_set),
3429 ep->to_rtx, 1);
3430 if (! apply_change_group ())
3432 SET_SRC (old_set) = src;
3433 SET_DEST (old_set) = ep->to_rtx;
3437 val = 1;
3438 goto done;
3441 #endif
3443 /* In this case this insn isn't serving a useful purpose. We
3444 will delete it in reload_as_needed once we know that this
3445 elimination is, in fact, being done.
3447 If REPLACE isn't set, we can't delete this insn, but needn't
3448 process it since it won't be used unless something changes. */
3449 if (replace)
3450 delete_dead_insn (insn);
3451 val = 1;
3452 goto done;
3455 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3456 in the insn is the negative of the offset in FROM. Substitute
3457 (set (reg) (reg to)) for the insn and change its code.
3459 We have to do this here, rather than in eliminate_regs, so that we can
3460 change the insn code. */
3462 if (GET_CODE (SET_SRC (old_set)) == PLUS
3463 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3464 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3465 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3466 ep++)
3467 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3468 && ep->can_eliminate)
3470 /* We must stop at the first elimination that will be used.
3471 If this one would replace the PLUS with a REG, do it
3472 now. Otherwise, quit the loop and let eliminate_regs
3473 do its normal replacement. */
3474 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3476 /* We assume here that we don't need a PARALLEL of
3477 any CLOBBERs for this assignment. There's not
3478 much we can do if we do need it. */
3479 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3480 SET_DEST (old_set),
3481 ep->to_rtx);
3482 INSN_CODE (insn) = -1;
3483 val = 1;
3484 goto done;
3487 break;
3491 old_asm_operands_vec = 0;
3493 /* Replace the body of this insn with a substituted form. If we changed
3494 something, return non-zero.
3496 If we are replacing a body that was a (set X (plus Y Z)), try to
3497 re-recognize the insn. We do this in case we had a simple addition
3498 but now can do this as a load-address. This saves an insn in this
3499 common case. */
3501 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3502 if (new_body != old_body)
3504 /* If we aren't replacing things permanently and we changed something,
3505 make another copy to ensure that all the RTL is new. Otherwise
3506 things can go wrong if find_reloads swaps commutative operands
3507 and one is inside RTL that has been copied while the other is not. */
3509 /* Don't copy an asm_operands because (1) there's no need and (2)
3510 copy_rtx can't do it properly when there are multiple outputs. */
3511 if (! replace && asm_noperands (old_body) < 0)
3512 new_body = copy_rtx (new_body);
3514 /* If we had a move insn but now we don't, rerecognize it. This will
3515 cause spurious re-recognition if the old move had a PARALLEL since
3516 the new one still will, but we can't call single_set without
3517 having put NEW_BODY into the insn and the re-recognition won't
3518 hurt in this rare case. */
3519 if (old_set != 0
3520 && ((GET_CODE (SET_SRC (old_set)) == REG
3521 && (GET_CODE (new_body) != SET
3522 || GET_CODE (SET_SRC (new_body)) != REG))
3523 /* If this was a load from or store to memory, compare
3524 the MEM in recog_operand to the one in the insn. If they
3525 are not equal, then rerecognize the insn. */
3526 || (old_set != 0
3527 && ((GET_CODE (SET_SRC (old_set)) == MEM
3528 && SET_SRC (old_set) != recog_operand[1])
3529 || (GET_CODE (SET_DEST (old_set)) == MEM
3530 && SET_DEST (old_set) != recog_operand[0])))
3531 /* If this was an add insn before, rerecognize. */
3532 || GET_CODE (SET_SRC (old_set)) == PLUS))
3534 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3535 /* If recognition fails, store the new body anyway.
3536 It's normal to have recognition failures here
3537 due to bizarre memory addresses; reloading will fix them. */
3538 PATTERN (insn) = new_body;
3540 else
3541 PATTERN (insn) = new_body;
3543 val = 1;
3546 /* Loop through all elimination pairs. See if any have changed and
3547 recalculate the number not at initial offset.
3549 Compute the maximum offset (minimum offset if the stack does not
3550 grow downward) for each elimination pair.
3552 We also detect cases where register elimination cannot be done,
3553 namely, if a register would be both changed and referenced outside a MEM
3554 in the resulting insn since such an insn is often undefined and, even if
3555 not, we cannot know what meaning will be given to it. Note that it is
3556 valid to have a register used in an address in an insn that changes it
3557 (presumably with a pre- or post-increment or decrement).
3559 If anything changes, return nonzero. */
3561 num_not_at_initial_offset = 0;
3562 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3564 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3565 ep->can_eliminate = 0;
3567 ep->ref_outside_mem = 0;
3569 if (ep->previous_offset != ep->offset)
3570 val = 1;
3572 ep->previous_offset = ep->offset;
3573 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3574 num_not_at_initial_offset++;
3576 #ifdef STACK_GROWS_DOWNWARD
3577 ep->max_offset = MAX (ep->max_offset, ep->offset);
3578 #else
3579 ep->max_offset = MIN (ep->max_offset, ep->offset);
3580 #endif
3583 done:
3584 /* If we changed something, perform elimination in REG_NOTES. This is
3585 needed even when REPLACE is zero because a REG_DEAD note might refer
3586 to a register that we eliminate and could cause a different number
3587 of spill registers to be needed in the final reload pass than in
3588 the pre-passes. */
3589 if (val && REG_NOTES (insn) != 0)
3590 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3592 if (! replace)
3593 pop_obstacks ();
3595 return val;
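/* Editorial note -- a hedged, standalone sketch (not part of this file) of
   the offset arithmetic behind the (plus (reg from) (const_int c)) special
   case above.  That case collapses the PLUS to a bare (reg to) exactly when
   c == -offset, which implies the convention FROM == TO + offset; under that
   assumption an address FROM + c becomes TO + (offset + c).  The names below
   are illustrative only.  */
#if 0
#include <stdio.h>

struct elim { long offset; };		/* FROM == TO + offset */

/* Rebase the displacement of "FROM + c" onto TO.  Sets *BARE_TO when the
   result degenerates to a plain TO reference, as in the special case.  */
static long
rebase_displacement (const struct elim *ep, long c, int *bare_to)
{
  long new_c = ep->offset + c;
  *bare_to = (new_c == 0);
  return new_c;
}

int
main (void)
{
  struct elim ep = { 16 };		/* e.g. fake FP = SP + 16 */
  int bare;
  long c = rebase_displacement (&ep, -16, &bare);
  printf ("FROM + -16 -> TO + %ld%s\n", c, bare ? " (bare TO)" : "");
  return 0;
}
#endif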
3598 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3599 replacement we currently believe is valid, mark it as not eliminable if X
3600 modifies DEST in any way other than by adding a constant integer to it.
3602 If DEST is the hard frame pointer, we do nothing because we assume that
3603 all assignments to the hard frame pointer are for nonlocal gotos and are being
3604 done at a time when they are valid and do not disturb anything else.
3605 Some machines want to eliminate a fake argument pointer with either the
3606 frame or stack pointer. Assignments to the hard frame pointer must not
3607 prevent this elimination.
3609 Called via note_stores from reload before starting its passes to scan
3610 the insns of the function. */
3612 static void
3613 mark_not_eliminable (dest, x)
3614 rtx dest;
3615 rtx x;
3617 register int i;
3619 /* A SUBREG of a hard register here is just changing its mode. We should
3620 not see a SUBREG of an eliminable hard register, but check just in
3621 case. */
3622 if (GET_CODE (dest) == SUBREG)
3623 dest = SUBREG_REG (dest);
3625 if (dest == hard_frame_pointer_rtx)
3626 return;
3628 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3629 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3630 && (GET_CODE (x) != SET
3631 || GET_CODE (SET_SRC (x)) != PLUS
3632 || XEXP (SET_SRC (x), 0) != dest
3633 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3635 reg_eliminate[i].can_eliminate_previous
3636 = reg_eliminate[i].can_eliminate = 0;
3637 num_eliminable--;
3641 /* Kick all pseudos out of hard register REGNO.
3642 If GLOBAL is nonzero, try to find someplace else to put them.
3643 If DUMPFILE is nonzero, log actions taken on that file.
3645 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3646 because we found we can't eliminate some register. In this case, no pseudos
3647 are allowed to be in the register, even if they are only in a block that
3648 doesn't require spill registers, unlike the case when we are spilling this
3649 hard reg to produce another spill register.
3651 Return nonzero if any pseudos needed to be kicked out. */
3653 static int
3654 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3655 register int regno;
3656 int global;
3657 FILE *dumpfile;
3658 int cant_eliminate;
3660 enum reg_class class = REGNO_REG_CLASS (regno);
3661 int something_changed = 0;
3662 register int i;
3664 SET_HARD_REG_BIT (forbidden_regs, regno);
3666 if (cant_eliminate)
3667 regs_ever_live[regno] = 1;
3669 /* Spill every pseudo reg that was allocated to this reg
3670 or to something that overlaps this reg. */
3672 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3673 if (reg_renumber[i] >= 0
3674 && reg_renumber[i] <= regno
3675 && (reg_renumber[i]
3676 + HARD_REGNO_NREGS (reg_renumber[i],
3677 PSEUDO_REGNO_MODE (i))
3678 > regno))
3680 /* If this register belongs solely to a basic block which needed no
3681 spilling of any class that this register is contained in,
3682 leave it be, unless we are spilling this register because
3683 it was a hard register that can't be eliminated. */
3685 if (! cant_eliminate
3686 && basic_block_needs[0]
3687 && REG_BASIC_BLOCK (i) >= 0
3688 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
3690 enum reg_class *p;
3692 for (p = reg_class_superclasses[(int) class];
3693 *p != LIM_REG_CLASSES; p++)
3694 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3695 break;
3697 if (*p == LIM_REG_CLASSES)
3698 continue;
3701 /* Mark it as no longer having a hard register home. */
3702 reg_renumber[i] = -1;
3703 /* We will need to scan everything again. */
3704 something_changed = 1;
3705 if (global)
3706 retry_global_alloc (i, forbidden_regs);
3708 alter_reg (i, regno);
3709 if (dumpfile)
3711 if (reg_renumber[i] == -1)
3712 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3713 else
3714 fprintf (dumpfile, " Register %d now in %d.\n\n",
3715 i, reg_renumber[i]);
3718 for (i = 0; i < scratch_list_length; i++)
3720 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3722 if (! cant_eliminate && basic_block_needs[0]
3723 && ! basic_block_needs[(int) class][scratch_block[i]])
3725 enum reg_class *p;
3727 for (p = reg_class_superclasses[(int) class];
3728 *p != LIM_REG_CLASSES; p++)
3729 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3730 break;
3732 if (*p == LIM_REG_CLASSES)
3733 continue;
3735 PUT_CODE (scratch_list[i], SCRATCH);
3736 scratch_list[i] = 0;
3737 something_changed = 1;
3738 continue;
3742 return something_changed;
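/* Editorial note -- a hedged, standalone restatement (not part of this file)
   of the overlap test in the loop above: a pseudo allocated to hard reg HR
   that occupies NREGS consecutive registers must be kicked out when the
   half-open range [HR, HR + NREGS) contains REGNO.  */
#if 0
static int
pseudo_overlaps_hard_reg (int hr, int nregs, int regno)
{
  return hr >= 0 && hr <= regno && hr + nregs > regno;
}
#endif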
3745 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3746 Also mark any hard registers used to store user variables as
3747 forbidden from being used for spill registers. */
3749 static void
3750 scan_paradoxical_subregs (x)
3751 register rtx x;
3753 register int i;
3754 register char *fmt;
3755 register enum rtx_code code = GET_CODE (x);
3757 switch (code)
3759 case REG:
3760 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3761 && REG_USERVAR_P (x))
3762 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3763 return;
3765 case CONST_INT:
3766 case CONST:
3767 case SYMBOL_REF:
3768 case LABEL_REF:
3769 case CONST_DOUBLE:
3770 case CC0:
3771 case PC:
3772 case USE:
3773 case CLOBBER:
3774 return;
3776 case SUBREG:
3777 if (GET_CODE (SUBREG_REG (x)) == REG
3778 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3779 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3780 = GET_MODE_SIZE (GET_MODE (x));
3781 return;
3783 default:
3784 break;
3787 fmt = GET_RTX_FORMAT (code);
3788 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3790 if (fmt[i] == 'e')
3791 scan_paradoxical_subregs (XEXP (x, i));
3792 else if (fmt[i] == 'E')
3794 register int j;
3795 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3796 scan_paradoxical_subregs (XVECEXP (x, i, j));
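/* Editorial note -- a hedged sketch (not part of this file) of the
   bookkeeping above: when an access is paradoxical (the SUBREG's outer mode
   is wider than the register's own mode), record a reference width at least
   that wide so the register's eventual stack slot can be sized to cover it.
   This illustrative variant keeps the maximum width seen; the code above
   simply stores the wider size.  */
#if 0
static void
note_paradoxical_ref (unsigned int *max_ref_width, int regno,
		      unsigned int inner_size, unsigned int outer_size)
{
  if (outer_size > inner_size && outer_size > max_ref_width[regno])
    max_ref_width[regno] = outer_size;
}
#endif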
3801 static int
3802 hard_reg_use_compare (p1p, p2p)
3803 const GENERIC_PTR p1p;
3804 const GENERIC_PTR p2p;
3806 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3807 *p2 = (struct hard_reg_n_uses *)p2p;
3808 int tem = p1->uses - p2->uses;
3809 if (tem != 0) return tem;
3810 /* If regs are equally good, sort by regno,
3811 so that the results of qsort leave nothing to chance. */
3812 return p1->regno - p2->regno;
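/* Editorial note -- a hedged, standalone sketch (not part of this file) of
   the deterministic two-key comparator pattern used by hard_reg_use_compare
   above: compare the primary key first, then an always-distinct key (here
   the register number) so qsort, which is not a stable sort, still produces
   a reproducible order.  All names and data are illustrative.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct use { int uses, regno; };

static int
use_compare (const void *ap, const void *bp)
{
  const struct use *a = ap, *b = bp;
  if (a->uses != b->uses)
    return a->uses - b->uses;	/* least-used registers first */
  return a->regno - b->regno;	/* tie-break: nothing left to chance */
}

int
main (void)
{
  struct use v[] = { {3, 7}, {0, 2}, {3, 1}, {0, 9} };
  size_t i, n = sizeof v / sizeof v[0];
  qsort (v, n, sizeof v[0], use_compare);
  for (i = 0; i < n; i++)
    printf ("reg %d (uses %d)\n", v[i].regno, v[i].uses);
  return 0;
}
#endif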
3815 /* Choose the order to consider regs for use as reload registers
3816 based on how much trouble would be caused by spilling one.
3817 Store them in order of decreasing preference in potential_reload_regs. */
3819 static void
3820 order_regs_for_reload (global)
3821 int global;
3823 register int i;
3824 register int o = 0;
3825 int large = 0;
3827 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3829 CLEAR_HARD_REG_SET (bad_spill_regs);
3831 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3832 potential_reload_regs[i] = -1;
3834 /* Count number of uses of each hard reg by pseudo regs allocated to it
3835 and then order them by decreasing use. */
3837 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3839 hard_reg_n_uses[i].uses = 0;
3840 hard_reg_n_uses[i].regno = i;
3843 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3845 int regno = reg_renumber[i];
3846 if (regno >= 0)
3848 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3849 while (regno < lim)
3851 /* If allocated by local-alloc, show more uses since
3852 we're not going to be able to reallocate it, but
3853 we might if allocated by global alloc. */
3854 if (global && reg_allocno[i] < 0)
3855 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3857 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
3860 large += REG_N_REFS (i);
3863 /* Now fixed registers (which cannot safely be used for reloading)
3864 get a very high use count so they will be considered least desirable.
3865 Registers used explicitly in the rtl code are almost as bad. */
3867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3869 if (fixed_regs[i])
3871 hard_reg_n_uses[i].uses += 2 * large + 2;
3872 SET_HARD_REG_BIT (bad_spill_regs, i);
3874 else if (regs_explicitly_used[i])
3876 hard_reg_n_uses[i].uses += large + 1;
3877 if (! SMALL_REGISTER_CLASSES)
3878 /* ??? We are doing this here because of the potential
3879 that bad code may be generated if a register explicitly
3880 used in an insn was used as a spill register for that
3881 insn. But not using these as spill registers may lose
3882 on some machines. We'll have to see how this works out. */
3883 SET_HARD_REG_BIT (bad_spill_regs, i);
3886 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3887 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3889 #ifdef ELIMINABLE_REGS
3890 /* If registers other than the frame pointer are eliminable, mark them as
3891 poor choices. */
3892 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3894 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3895 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3897 #endif
3899 /* Prefer registers not so far used, for use in temporary loading.
3900 Among them, if REG_ALLOC_ORDER is defined, use that order.
3901 Otherwise, prefer registers not preserved by calls. */
3903 #ifdef REG_ALLOC_ORDER
3904 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3906 int regno = reg_alloc_order[i];
3908 if (hard_reg_n_uses[regno].uses == 0)
3909 potential_reload_regs[o++] = regno;
3911 #else
3912 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3914 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3915 potential_reload_regs[o++] = i;
3917 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3919 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3920 potential_reload_regs[o++] = i;
3922 #endif
3924 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3925 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3927 /* Now add the regs that are already used,
3928 preferring those used less often. The fixed and otherwise forbidden
3929 registers will be at the end of this list. */
3931 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3932 if (hard_reg_n_uses[i].uses != 0)
3933 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3936 /* Used in reload_as_needed to sort the spilled regs. */
3938 static int
3939 compare_spill_regs (r1p, r2p)
3940 const GENERIC_PTR r1p;
3941 const GENERIC_PTR r2p;
3943 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3944 return r1 - r2;
3947 /* Reload pseudo-registers into hard regs around each insn as needed.
3948 Additional register load insns are output before the insn that needs it
3949 and perhaps store insns after insns that modify the reloaded pseudo reg.
3951 reg_last_reload_reg and reg_reloaded_contents keep track of
3952 which registers are already available in reload registers.
3953 We update these for the reloads that we perform,
3954 as the insns are scanned. */
3956 static void
3957 reload_as_needed (first, live_known)
3958 rtx first;
3959 int live_known;
3961 register rtx insn;
3962 register int i;
3963 int this_block = 0;
3964 rtx x;
3965 rtx after_call = 0;
3967 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3968 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3969 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3970 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3971 reg_has_output_reload = (char *) alloca (max_regno);
3972 CLEAR_HARD_REG_SET (reg_reloaded_valid);
3974 /* Reset all offsets on eliminable registers to their initial values. */
3975 #ifdef ELIMINABLE_REGS
3976 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3978 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3979 reg_eliminate[i].initial_offset);
3980 reg_eliminate[i].previous_offset
3981 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3983 #else
3984 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3985 reg_eliminate[0].previous_offset
3986 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3987 #endif
3989 num_not_at_initial_offset = 0;
3991 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3992 pack registers with group needs. */
3993 if (n_spills > 1)
3995 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3996 for (i = 0; i < n_spills; i++)
3997 spill_reg_order[spill_regs[i]] = i;
4000 for (insn = first; insn;)
4002 register rtx next = NEXT_INSN (insn);
4004 /* Notice when we move to a new basic block. */
4005 if (live_known && this_block + 1 < n_basic_blocks
4006 && insn == basic_block_head[this_block+1])
4007 ++this_block;
4009 /* If we pass a label, copy the offsets from the label information
4010 into the current offsets of each elimination. */
4011 if (GET_CODE (insn) == CODE_LABEL)
4013 num_not_at_initial_offset = 0;
4014 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4016 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4017 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4018 if (reg_eliminate[i].can_eliminate
4019 && (reg_eliminate[i].offset
4020 != reg_eliminate[i].initial_offset))
4021 num_not_at_initial_offset++;
4025 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4027 rtx avoid_return_reg = 0;
4028 rtx oldpat = PATTERN (insn);
4030 /* Set avoid_return_reg if this is an insn
4031 that might use the value of a function call. */
4032 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4034 if (GET_CODE (PATTERN (insn)) == SET)
4035 after_call = SET_DEST (PATTERN (insn));
4036 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4037 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4038 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4039 else
4040 after_call = 0;
4042 else if (SMALL_REGISTER_CLASSES && after_call != 0
4043 && !(GET_CODE (PATTERN (insn)) == SET
4044 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
4045 && GET_CODE (PATTERN (insn)) != USE)
4047 if (reg_referenced_p (after_call, PATTERN (insn)))
4048 avoid_return_reg = after_call;
4049 after_call = 0;
4052 /* If this is a USE or CLOBBER of a MEM, ensure that any
4053 references to eliminable registers have been removed. */
4055 if ((GET_CODE (PATTERN (insn)) == USE
4056 || GET_CODE (PATTERN (insn)) == CLOBBER)
4057 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4058 XEXP (XEXP (PATTERN (insn), 0), 0)
4059 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4060 GET_MODE (XEXP (PATTERN (insn), 0)),
4061 NULL_RTX);
4063 /* If we need to do register elimination processing, do so.
4064 This might delete the insn, in which case we are done. */
4065 if (num_eliminable && GET_MODE (insn) == QImode)
4067 eliminate_regs_in_insn (insn, 1);
4068 if (GET_CODE (insn) == NOTE)
4070 insn = next;
4071 continue;
4075 if (GET_MODE (insn) == VOIDmode)
4076 n_reloads = 0;
4077 /* First find the pseudo regs that must be reloaded for this insn.
4078 This info is returned in the tables reload_... (see reload.h).
4079 Also modify the body of INSN by substituting RELOAD
4080 rtx's for those pseudo regs. */
4081 else
4083 bzero (reg_has_output_reload, max_regno);
4084 CLEAR_HARD_REG_SET (reg_is_output_reload);
4086 find_reloads (insn, 1, spill_indirect_levels, live_known,
4087 spill_reg_order);
4090 if (n_reloads > 0)
4092 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4093 rtx p;
4094 int class;
4096 /* If this block has not had spilling done for a
4097 particular class and we have any non-optionals that need a
4098 spill reg in that class, abort. */
4100 for (class = 0; class < N_REG_CLASSES; class++)
4101 if (basic_block_needs[class] != 0
4102 && basic_block_needs[class][this_block] == 0)
4103 for (i = 0; i < n_reloads; i++)
4104 if (class == (int) reload_reg_class[i]
4105 && reload_reg_rtx[i] == 0
4106 && ! reload_optional[i]
4107 && (reload_in[i] != 0 || reload_out[i] != 0
4108 || reload_secondary_p[i] != 0))
4109 fatal_insn ("Non-optional registers need a spill register", insn);
4111 /* Now compute which reload regs to reload them into. Perhaps
4112 reusing reload regs from previous insns, or else output
4113 load insns to reload them. Maybe output store insns too.
4114 Record the choices of reload reg in reload_reg_rtx. */
4115 choose_reload_regs (insn, avoid_return_reg);
4117 /* Merge any reloads that we didn't combine for fear of
4118 increasing the number of spill registers needed but now
4119 discover can be safely merged. */
4120 if (SMALL_REGISTER_CLASSES)
4121 merge_assigned_reloads (insn);
4123 /* Generate the insns to reload operands into or out of
4124 their reload regs. */
4125 emit_reload_insns (insn);
4127 /* Substitute the chosen reload regs from reload_reg_rtx
4128 into the insn's body (or perhaps into the bodies of other
4129 load and store insns that we just made for reloading
4130 and that we moved the structure into). */
4131 subst_reloads ();
4133 /* If this was an ASM, make sure that all the reload insns
4134 we have generated are valid. If not, give an error
4135 and delete them. */
4137 if (asm_noperands (PATTERN (insn)) >= 0)
4138 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4139 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4140 && (recog_memoized (p) < 0
4141 || (insn_extract (p),
4142 ! constrain_operands (INSN_CODE (p), 1))))
4144 error_for_asm (insn,
4145 "`asm' operand requires impossible reload");
4146 PUT_CODE (p, NOTE);
4147 NOTE_SOURCE_FILE (p) = 0;
4148 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4151 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4152 is no longer validly lying around to save a future reload.
4153 Note that this does not detect pseudos that were reloaded
4154 for this insn in order to be stored in
4155 (obeying register constraints). That is correct; such reload
4156 registers ARE still valid. */
4157 note_stores (oldpat, forget_old_reloads_1);
4159 /* There may have been CLOBBER insns placed after INSN. So scan
4160 between INSN and NEXT and use them to forget old reloads. */
4161 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4162 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4163 note_stores (PATTERN (x), forget_old_reloads_1);
4165 #ifdef AUTO_INC_DEC
4166 /* Likewise for regs altered by auto-increment in this insn.
4167 But note that the reg-notes are not changed by reloading:
4168 they still contain the pseudo-regs, not the spill regs. */
4169 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4170 if (REG_NOTE_KIND (x) == REG_INC)
4172 /* See if this pseudo reg was reloaded in this insn.
4173 If so, its last-reload info is still valid
4174 because it is based on this insn's reload. */
4175 for (i = 0; i < n_reloads; i++)
4176 if (reload_out[i] == XEXP (x, 0))
4177 break;
4179 if (i == n_reloads)
4180 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4182 #endif
4184 /* A reload reg's contents are unknown after a label. */
4185 if (GET_CODE (insn) == CODE_LABEL)
4186 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4188 /* Don't assume a reload reg is still good after a call insn
4189 if it is a call-used reg. */
4190 else if (GET_CODE (insn) == CALL_INSN)
4191 AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);
4193 /* In case registers overlap, allow certain insns to invalidate
4194 particular hard registers. */
4196 #ifdef INSN_CLOBBERS_REGNO_P
4197 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4198 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4199 && INSN_CLOBBERS_REGNO_P (insn, i))
4200 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
4201 #endif
4203 insn = next;
4205 #ifdef USE_C_ALLOCA
4206 alloca (0);
4207 #endif
4211 /* Discard all record of any value reloaded from X,
4212 or reloaded in X from someplace else;
4213 unless X is an output reload reg of the current insn.
4215 X may be a hard reg (the reload reg)
4216 or it may be a pseudo reg that was reloaded from. */
4218 static void
4219 forget_old_reloads_1 (x, ignored)
4220 rtx x;
4221 rtx ignored ATTRIBUTE_UNUSED;
4223 register int regno;
4224 int nr;
4225 int offset = 0;
4227 /* note_stores does give us subregs of hard regs. */
4228 while (GET_CODE (x) == SUBREG)
4230 offset += SUBREG_WORD (x);
4231 x = SUBREG_REG (x);
4234 if (GET_CODE (x) != REG)
4235 return;
4237 regno = REGNO (x) + offset;
4239 if (regno >= FIRST_PSEUDO_REGISTER)
4240 nr = 1;
4241 else
4243 int i;
4244 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4245 /* Storing into a spilled-reg invalidates its contents.
4246 This can happen if a block-local pseudo is allocated to that reg
4247 and it wasn't spilled because this block's total need is 0.
4248 Then some insn might have an optional reload and use this reg. */
4249 for (i = 0; i < nr; i++)
4250 /* But don't do this if the reg actually serves as an output
4251 reload reg in the current instruction. */
4252 if (n_reloads == 0
4253 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4254 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4257 /* Since value of X has changed,
4258 forget any value previously copied from it. */
4260 while (nr-- > 0)
4261 /* But don't forget a copy if this is the output reload
4262 that establishes the copy's validity. */
4263 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4264 reg_last_reload_reg[regno + nr] = 0;
4267 /* For each reload, the mode of the reload register. */
4268 static enum machine_mode reload_mode[MAX_RELOADS];
4270 /* For each reload, the largest number of registers it will require. */
4271 static int reload_nregs[MAX_RELOADS];
4273 /* Comparison function for qsort to decide which of two reloads
4274 should be handled first. *P1 and *P2 are the reload numbers. */
4276 static int
4277 reload_reg_class_lower (r1p, r2p)
4278 const GENERIC_PTR r1p;
4279 const GENERIC_PTR r2p;
4281 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4282 register int t;
4284 /* Consider required reloads before optional ones. */
4285 t = reload_optional[r1] - reload_optional[r2];
4286 if (t != 0)
4287 return t;
4289 /* Count all solitary classes before non-solitary ones. */
4290 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4291 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4292 if (t != 0)
4293 return t;
4295 /* Aside from solitaires, consider all multi-reg groups first. */
4296 t = reload_nregs[r2] - reload_nregs[r1];
4297 if (t != 0)
4298 return t;
4300 /* Consider reloads in order of increasing reg-class number. */
4301 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4302 if (t != 0)
4303 return t;
4305 /* If reloads are equally urgent, sort by reload number,
4306 so that the results of qsort leave nothing to chance. */
4307 return r1 - r2;
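/* Editorial note -- a hedged, standalone sketch (not part of this file) of
   the pattern reload_reg_class_lower relies on: the array being sorted holds
   reload *numbers*, so the comparator looks its keys up in parallel
   per-reload arrays rather than in the elements themselves.  The data and
   names below are hypothetical.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static int optional[] = { 1, 0, 0, 1 };	/* per-reload keys */
static int nregs[]    = { 1, 2, 1, 1 };

static int
order_compare (const void *ap, const void *bp)
{
  short a = *(const short *) ap, b = *(const short *) bp;
  if (optional[a] != optional[b])
    return optional[a] - optional[b];	/* required reloads first */
  if (nregs[a] != nregs[b])
    return nregs[b] - nregs[a];		/* bigger groups first */
  return a - b;				/* deterministic tie-break */
}

int
main (void)
{
  short order[] = { 0, 1, 2, 3 };
  int i;
  qsort (order, 4, sizeof (short), order_compare);
  for (i = 0; i < 4; i++)
    printf ("reload %d\n", order[i]);	/* prints 1, 2, 0, 3 */
  return 0;
}
#endif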
4310 /* The following HARD_REG_SETs indicate when each hard register is
4311 used for a reload of various parts of the current insn. */
4313 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4314 static HARD_REG_SET reload_reg_used;
4315 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4316 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4317 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4318 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4319 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4320 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4321 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4322 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4323 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4324 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4325 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4326 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4327 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4328 static HARD_REG_SET reload_reg_used_in_op_addr;
4329 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4330 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4331 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4332 static HARD_REG_SET reload_reg_used_in_insn;
4333 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4334 static HARD_REG_SET reload_reg_used_in_other_addr;
4336 /* If reg is in use as a reload reg for any sort of reload. */
4337 static HARD_REG_SET reload_reg_used_at_all;
4339 /* If reg is in use as an inherited reload. We just mark the first register
4340 in the group. */
4341 static HARD_REG_SET reload_reg_used_for_inherit;
4343 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4344 TYPE. MODE is used to indicate how many consecutive regs are
4345 actually used. */
4347 static void
4348 mark_reload_reg_in_use (regno, opnum, type, mode)
4349 int regno;
4350 int opnum;
4351 enum reload_type type;
4352 enum machine_mode mode;
4354 int nregs = HARD_REGNO_NREGS (regno, mode);
4355 int i;
4357 for (i = regno; i < nregs + regno; i++)
4359 switch (type)
4361 case RELOAD_OTHER:
4362 SET_HARD_REG_BIT (reload_reg_used, i);
4363 break;
4365 case RELOAD_FOR_INPUT_ADDRESS:
4366 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4367 break;
4369 case RELOAD_FOR_INPADDR_ADDRESS:
4370 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4371 break;
4373 case RELOAD_FOR_OUTPUT_ADDRESS:
4374 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4375 break;
4377 case RELOAD_FOR_OUTADDR_ADDRESS:
4378 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4379 break;
4381 case RELOAD_FOR_OPERAND_ADDRESS:
4382 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4383 break;
4385 case RELOAD_FOR_OPADDR_ADDR:
4386 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4387 break;
4389 case RELOAD_FOR_OTHER_ADDRESS:
4390 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4391 break;
4393 case RELOAD_FOR_INPUT:
4394 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4395 break;
4397 case RELOAD_FOR_OUTPUT:
4398 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4399 break;
4401 case RELOAD_FOR_INSN:
4402 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4403 break;
4406 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4410 /* Similarly, but show REGNO is no longer in use for a reload. */
4412 static void
4413 clear_reload_reg_in_use (regno, opnum, type, mode)
4414 int regno;
4415 int opnum;
4416 enum reload_type type;
4417 enum machine_mode mode;
4419 int nregs = HARD_REGNO_NREGS (regno, mode);
4420 int i;
4422 for (i = regno; i < nregs + regno; i++)
4424 switch (type)
4426 case RELOAD_OTHER:
4427 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4428 break;
4430 case RELOAD_FOR_INPUT_ADDRESS:
4431 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4432 break;
4434 case RELOAD_FOR_INPADDR_ADDRESS:
4435 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4436 break;
4438 case RELOAD_FOR_OUTPUT_ADDRESS:
4439 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4440 break;
4442 case RELOAD_FOR_OUTADDR_ADDRESS:
4443 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4444 break;
4446 case RELOAD_FOR_OPERAND_ADDRESS:
4447 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4448 break;
4450 case RELOAD_FOR_OPADDR_ADDR:
4451 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4452 break;
4454 case RELOAD_FOR_OTHER_ADDRESS:
4455 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4456 break;
4458 case RELOAD_FOR_INPUT:
4459 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4460 break;
4462 case RELOAD_FOR_OUTPUT:
4463 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4464 break;
4466 case RELOAD_FOR_INSN:
4467 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4468 break;
4473 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4474 specified by OPNUM and TYPE. */
4476 static int
4477 reload_reg_free_p (regno, opnum, type)
4478 int regno;
4479 int opnum;
4480 enum reload_type type;
4482 int i;
4484 /* In use for a RELOAD_OTHER means it's not available for anything. */
4485 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4486 return 0;
4488 switch (type)
4490 case RELOAD_OTHER:
4491 /* In use for anything means we can't use it for RELOAD_OTHER. */
4492 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4493 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4494 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4495 return 0;
4497 for (i = 0; i < reload_n_operands; i++)
4498 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4499 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4500 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4501 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4502 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4503 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4504 return 0;
4506 return 1;
4508 case RELOAD_FOR_INPUT:
4509 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4510 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4511 return 0;
4513 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4514 return 0;
4516 /* If it is used for some other input, can't use it. */
4517 for (i = 0; i < reload_n_operands; i++)
4518 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4519 return 0;
4521 /* If it is used in a later operand's address, can't use it. */
4522 for (i = opnum + 1; i < reload_n_operands; i++)
4523 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4524 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4525 return 0;
4527 return 1;
4529 case RELOAD_FOR_INPUT_ADDRESS:
4530 /* Can't use a register if it is used for an input address for this
4531 operand or used as an input in an earlier one. */
4532 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4533 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4534 return 0;
4536 for (i = 0; i < opnum; i++)
4537 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4538 return 0;
4540 return 1;
4542 case RELOAD_FOR_INPADDR_ADDRESS:
4543 /* Can't use a register if it is used for an input address
4544 for this operand or used as an input in an earlier
4545 one. */
4546 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4547 return 0;
4549 for (i = 0; i < opnum; i++)
4550 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4551 return 0;
4553 return 1;
4555 case RELOAD_FOR_OUTPUT_ADDRESS:
4556 /* Can't use a register if it is used for an output address for this
4557 operand or used as an output in this or a later operand. */
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4559 return 0;
4561 for (i = opnum; i < reload_n_operands; i++)
4562 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4563 return 0;
4565 return 1;
4567 case RELOAD_FOR_OUTADDR_ADDRESS:
4568 /* Can't use a register if it is used for an output address
4569 for this operand or used as an output in this or a
4570 later operand. */
4571 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4572 return 0;
4574 for (i = opnum; i < reload_n_operands; i++)
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4576 return 0;
4578 return 1;
4580 case RELOAD_FOR_OPERAND_ADDRESS:
4581 for (i = 0; i < reload_n_operands; i++)
4582 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4583 return 0;
4585 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4586 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4588 case RELOAD_FOR_OPADDR_ADDR:
4589 for (i = 0; i < reload_n_operands; i++)
4590 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4591 return 0;
4593 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4595 case RELOAD_FOR_OUTPUT:
4596 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4597 outputs, or an operand address for this or an earlier output. */
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4599 return 0;
4601 for (i = 0; i < reload_n_operands; i++)
4602 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4603 return 0;
4605 for (i = 0; i <= opnum; i++)
4606 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4607 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4608 return 0;
4610 return 1;
4612 case RELOAD_FOR_INSN:
4613 for (i = 0; i < reload_n_operands; i++)
4614 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4615 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4616 return 0;
4618 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4619 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4621 case RELOAD_FOR_OTHER_ADDRESS:
4622 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4624 abort ();
4627 /* Return 1 if the value in reload reg REGNO, as used by a reload
4628 needed for the part of the insn specified by OPNUM and TYPE,
4629 is not in use for a reload in any prior part of the insn.
4631 We can assume that the reload reg was already tested for availability
4632 at the time it is needed, and we should not check this again,
4633 in case the reg has already been marked in use. */
4635 static int
4636 reload_reg_free_before_p (regno, opnum, type)
4637 int regno;
4638 int opnum;
4639 enum reload_type type;
4641 int i;
4643 switch (type)
4645 case RELOAD_FOR_OTHER_ADDRESS:
4646 /* These always come first. */
4647 return 1;
4649 case RELOAD_OTHER:
4650 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4652 /* If this use is for part of the insn,
4653 check the reg is not in use for any prior part. It is tempting
4654 to try to do this by falling through from objects that occur
4655 later in the insn to ones that occur earlier, but that will not
4656 correctly take into account the fact that here we MUST ignore
4657 things that would prevent the register from being allocated in
4658 the first place, since we know that it was allocated. */
4660 case RELOAD_FOR_OUTPUT_ADDRESS:
4661 case RELOAD_FOR_OUTADDR_ADDRESS:
4662 /* Earlier reloads are for earlier outputs or their addresses,
4663 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4664 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4665 RELOAD_OTHER). */
4666 for (i = 0; i < opnum; i++)
4667 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4668 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4669 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4670 return 0;
4672 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4673 return 0;
4675 for (i = 0; i < reload_n_operands; i++)
4676 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4677 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4678 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4679 return 0;
4681 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4682 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4683 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4685 case RELOAD_FOR_OUTPUT:
4686 /* This can't be used in the output address for this operand and
4687 anything that can't be used for it, except that we've already
4688 tested for RELOAD_FOR_INSN objects. */
4690 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4691 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4692 return 0;
4694 for (i = 0; i < opnum; i++)
4695 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4696 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4697 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4698 return 0;
4700 for (i = 0; i < reload_n_operands; i++)
4701 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4702 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4703 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4704 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4705 return 0;
4707 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4709 case RELOAD_FOR_OPERAND_ADDRESS:
4710 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4711 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4712 return 0;
4714 /* ... fall through ... */
4716 case RELOAD_FOR_OPADDR_ADDR:
4717 case RELOAD_FOR_INSN:
4718 /* These can't conflict with inputs, or each other, so all we have to
4719 test is input addresses and the addresses of OTHER items. */
4721 for (i = 0; i < reload_n_operands; i++)
4722 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4723 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4724 return 0;
4726 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4728 case RELOAD_FOR_INPUT:
4729 /* The only things earlier are the address for this and
4730 earlier inputs, other inputs (which we know we don't conflict
4731 with), and addresses of RELOAD_OTHER objects. */
4733 for (i = 0; i <= opnum; i++)
4734 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4735 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4736 return 0;
4738 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4740 case RELOAD_FOR_INPUT_ADDRESS:
4741 case RELOAD_FOR_INPADDR_ADDRESS:
4742 /* Similarly, all we have to check is for use in earlier inputs'
4743 addresses. */
4744 for (i = 0; i < opnum; i++)
4745 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4746 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4747 return 0;
4749 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4751 abort ();
4754 /* Return 1 if the value in reload reg REGNO, as used by a reload
4755 needed for the part of the insn specified by OPNUM and TYPE,
4756 is still available in REGNO at the end of the insn.
4758 We can assume that the reload reg was already tested for availability
4759 at the time it is needed, and we should not check this again,
4760 in case the reg has already been marked in use. */
4762 static int
4763 reload_reg_reaches_end_p (regno, opnum, type)
4764 int regno;
4765 int opnum;
4766 enum reload_type type;
4768 int i;
4770 switch (type)
4772 case RELOAD_OTHER:
4773 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4774 its value must reach the end. */
4775 return 1;
4777 /* If this use is for part of the insn,
4778 its value reaches the end if no subsequent part uses the same register.
4779 Just like the above function, don't try to do this with lots
4780 of fallthroughs. */
4782 case RELOAD_FOR_OTHER_ADDRESS:
4783 /* Here we check for everything else, since these don't conflict
4784 with anything else and everything comes later. */
4786 for (i = 0; i < reload_n_operands; i++)
4787 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4788 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4789 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4790 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4791 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4792 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4793 return 0;
4795 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4796 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4797 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4799 case RELOAD_FOR_INPUT_ADDRESS:
4800 case RELOAD_FOR_INPADDR_ADDRESS:
4801 /* Similar, except that we check only for this and subsequent inputs
4802 and the address of only subsequent inputs and we do not need
4803 to check for RELOAD_OTHER objects since they are known not to
4804 conflict. */
4806 for (i = opnum; i < reload_n_operands; i++)
4807 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4808 return 0;
4810 for (i = opnum + 1; i < reload_n_operands; i++)
4811 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4812 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4813 return 0;
4815 for (i = 0; i < reload_n_operands; i++)
4816 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4817 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4818 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4819 return 0;
4821 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4822 return 0;
4824 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4825 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4827 case RELOAD_FOR_INPUT:
4828 /* Similar to input address, except we start at the next operand for
4829 both input and input address and we do not check for
4830 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4831 would conflict. */
4833 for (i = opnum + 1; i < reload_n_operands; i++)
4834 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4835 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4836 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4837 return 0;
4839 /* ... fall through ... */
4841 case RELOAD_FOR_OPERAND_ADDRESS:
4842 /* Check outputs and their addresses. */
4844 for (i = 0; i < reload_n_operands; i++)
4845 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4846 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4847 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4848 return 0;
4850 return 1;
4852 case RELOAD_FOR_OPADDR_ADDR:
4853 for (i = 0; i < reload_n_operands; i++)
4854 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4855 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4856 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4857 return 0;
4859 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4860 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4862 case RELOAD_FOR_INSN:
4863 /* These conflict with other outputs with RELOAD_OTHER. So
4864 we need only check for output addresses. */
4866 opnum = -1;
4868 /* ... fall through ... */
4870 case RELOAD_FOR_OUTPUT:
4871 case RELOAD_FOR_OUTPUT_ADDRESS:
4872 case RELOAD_FOR_OUTADDR_ADDRESS:
4873 /* We already know these can't conflict with a later output. So the
4874 only things to check are later output addresses. */
4875 for (i = opnum + 1; i < reload_n_operands; i++)
4876 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4877 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4878 return 0;
4880 return 1;
4883 abort ();
4886 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4887 Return 0 otherwise.
4889 This function uses the same algorithm as reload_reg_free_p above. */
4892 reloads_conflict (r1, r2)
4893 int r1, r2;
4895 enum reload_type r1_type = reload_when_needed[r1];
4896 enum reload_type r2_type = reload_when_needed[r2];
4897 int r1_opnum = reload_opnum[r1];
4898 int r2_opnum = reload_opnum[r2];
4900 /* RELOAD_OTHER conflicts with everything. */
4901 if (r2_type == RELOAD_OTHER)
4902 return 1;
4904 /* Otherwise, check conflicts differently for each type. */
4906 switch (r1_type)
4908 case RELOAD_FOR_INPUT:
4909 return (r2_type == RELOAD_FOR_INSN
4910 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4911 || r2_type == RELOAD_FOR_OPADDR_ADDR
4912 || r2_type == RELOAD_FOR_INPUT
4913 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4914 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4915 && r2_opnum > r1_opnum));
4917 case RELOAD_FOR_INPUT_ADDRESS:
4918 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4919 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4921 case RELOAD_FOR_INPADDR_ADDRESS:
4922 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4923 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4925 case RELOAD_FOR_OUTPUT_ADDRESS:
4926 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4927 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4929 case RELOAD_FOR_OUTADDR_ADDRESS:
4930 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4931 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4933 case RELOAD_FOR_OPERAND_ADDRESS:
4934 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4935 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4937 case RELOAD_FOR_OPADDR_ADDR:
4938 return (r2_type == RELOAD_FOR_INPUT
4939 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4941 case RELOAD_FOR_OUTPUT:
4942 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4943 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4944 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4945 && r2_opnum >= r1_opnum));
4947 case RELOAD_FOR_INSN:
4948 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4949 || r2_type == RELOAD_FOR_INSN
4950 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4952 case RELOAD_FOR_OTHER_ADDRESS:
4953 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4955 case RELOAD_OTHER:
4956 return 1;
4958 default:
4959 abort ();
4963 /* Vector of reload-numbers showing the order in which the reloads should
4964 be processed. */
4965 short reload_order[MAX_RELOADS];
4967 /* Indexed by reload number, 1 if incoming value
4968 inherited from previous insns. */
4969 char reload_inherited[MAX_RELOADS];
4971 /* For an inherited reload, this is the insn the reload was inherited from,
4972 if we know it. Otherwise, this is 0. */
4973 rtx reload_inheritance_insn[MAX_RELOADS];
4975 /* If non-zero, this is a place to get the value of the reload,
4976 rather than using reload_in. */
4977 rtx reload_override_in[MAX_RELOADS];
4979 /* For each reload, the hard register number of the register used,
4980 or -1 if we did not need a register for this reload. */
4981 int reload_spill_index[MAX_RELOADS];
4983 /* Return 1 if the value in reload reg REGNO, as used by a reload
4984 needed for the part of the insn specified by OPNUM and TYPE,
4985 may be used to load VALUE into it.
4987 Other read-only reloads with the same value do not conflict
4988 unless OUT is non-zero and these other reloads have to live while
4989 output reloads live.
4991 RELOADNUM is the number of the reload we want to load this value for;
4992 a reload does not conflict with itself.
4994 The caller has to make sure that there is no conflict with the return
4995 register. */
4996 static int
4997 reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
4998 int regno;
4999 int opnum;
5000 enum reload_type type;
5001 rtx value, out;
5002 int reloadnum;
5004 int time1;
5005 int i;
5007 /* We use some pseudo 'time' value to check if the lifetimes of the
5008 new register use would overlap with the one of a previous reload
5009 that is not read-only or uses a different value.
5010 The 'time' used doesn't have to be linear in any shape or form, just
5011 monotonic.
5012 Some reload types use different 'buckets' for each operand.
5013 So there are MAX_RECOG_OPERANDS different time values for each
5014 such reload type.
5015 We compute TIME1 as the time when the register for the prospective
5016 new reload ceases to be live, and TIME2 for each existing
5017 reload as the time when the reload register of that reload
5018 becomes live.
5019 Where there is little to be gained by exact lifetime calculations,
5020 we just make conservative assumptions, i.e. a longer lifetime;
5021 this is done in the 'default:' cases. */
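/* As a concrete illustration (assuming a hypothetical two-operand insn),
   the buckets assigned by the two switch statements below work out to:
	type				opnum 0		opnum 1
	RELOAD_FOR_OTHER_ADDRESS	    0		    0
	RELOAD_FOR_INPADDR_ADDRESS	    1		    5
	RELOAD_FOR_INPUT_ADDRESS	    2		    6
	RELOAD_FOR_INPUT (as TIME2)	    3		    7
   while a prospective RELOAD_FOR_INPUT gets
   TIME1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3, i.e. it is treated as live
   until just before the insn executes, later than every input address
   reload, and output reloads become live at MAX_RECOG_OPERANDS * 4
   or later.  */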
5022 switch (type)
5024 case RELOAD_FOR_OTHER_ADDRESS:
5025 time1 = 0;
5026 break;
5027 /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5028 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5029 respectively, to the time values for these, we get distinct time
5030 values. To get distinct time values for each operand, we have to
5031 multiply opnum by at least three. We round that up to four because
5032 multiplying by four is often cheaper. */
5033 case RELOAD_FOR_INPADDR_ADDRESS:
5034 time1 = opnum * 4 + 1;
5035 break;
5036 case RELOAD_FOR_INPUT_ADDRESS:
5037 time1 = opnum * 4 + 2;
5038 break;
5039 case RELOAD_FOR_INPUT:
5040 /* All RELOAD_FOR_INPUT reloads remain live till just before the
5041 instruction is executed. */
5042 time1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3;
5043 break;
5044 /* opnum * 4 + 3 < opnum * 4 + 4
5045 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5046 case RELOAD_FOR_OUTPUT_ADDRESS:
5047 time1 = MAX_RECOG_OPERANDS * 4 + opnum;
5048 break;
5049 default:
5050 time1 = MAX_RECOG_OPERANDS * 5;
5053 for (i = 0; i < n_reloads; i++)
5055 rtx reg = reload_reg_rtx[i];
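/* The unsigned subtraction in the test below is a range check: it is
   true exactly when REGNO falls within the block of hard registers
   occupied by REG, since a REGNO below the start wraps around to a
   huge unsigned value.  A longhand version is sketched after this
   function.  */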
5056 if (reg && GET_CODE (reg) == REG
5057 && ((unsigned) regno - true_regnum (reg)
5058 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
5059 && i != reloadnum)
5061 if (out
5062 && reload_when_needed[i] != RELOAD_FOR_INPUT
5063 && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
5064 && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
5065 return 0;
5066 if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
5067 || reload_out[i])
5069 int time2;
5070 switch (reload_when_needed[i])
5072 case RELOAD_FOR_OTHER_ADDRESS:
5073 time2 = 0;
5074 break;
5075 case RELOAD_FOR_INPADDR_ADDRESS:
5076 time2 = reload_opnum[i] * 4 + 1;
5077 break;
5078 case RELOAD_FOR_INPUT_ADDRESS:
5079 time2 = reload_opnum[i] * 4 + 2;
5080 break;
5081 case RELOAD_FOR_INPUT:
5082 time2 = reload_opnum[i] * 4 + 3;
5083 break;
5084 case RELOAD_FOR_OUTPUT:
5085 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5086 instruction is executed. */
5087 time2 = MAX_RECOG_OPERANDS * 4;
5088 break;
5089 /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
5090 RELOAD_FOR_OUTPUT reloads, so assign it the same time value. */
5091 case RELOAD_FOR_OUTPUT_ADDRESS:
5092 time2 = MAX_RECOG_OPERANDS * 4 + reload_opnum[i];
5093 break;
5094 case RELOAD_OTHER:
5095 if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
5097 time2 = MAX_RECOG_OPERANDS * 4;
5098 break;
5100 default:
5101 time2 = 0;
5103 if (time1 >= time2)
5104 return 0;
5108 return 1;
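#if 0
/* A longhand sketch (not used anywhere in the compiler) of the wrapping
   range check used in the loop above: nonzero if hard register REGNO is
   one of the registers occupied by the hard register rtx REG.  */
static int
regno_within_reg_p (regno, reg)
     int regno;
     rtx reg;
{
  int first = true_regnum (reg);
  int nregs = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));

  return regno >= first && regno < first + nregs;
}
#endif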
5111 /* Find a spill register to use as a reload register for reload R.
5112 LAST_RELOAD is non-zero if this is the last reload for the insn being
5113 processed.
5115 Set reload_reg_rtx[R] to the register allocated.
5117 If NOERROR is nonzero, we return 1 if successful,
5118 or 0 if we couldn't find a spill reg and we didn't change anything. */
5120 static int
5121 allocate_reload_reg (r, insn, last_reload, noerror)
5122 int r;
5123 rtx insn;
5124 int last_reload;
5125 int noerror;
5127 int i;
5128 int pass;
5129 int count;
5130 rtx new;
5131 int regno;
5133 /* If we put this reload ahead, thinking it is a group,
5134 then insist on finding a group. Otherwise we can grab a
5135 reg that some other reload needs.
5136 (That can happen when we have a 68000 DATA_OR_FP_REG
5137 which is a group of data regs or one fp reg.)
5138 We need not be so restrictive if there are no more reloads
5139 for this insn.
5141 ??? Really it would be nicer to have smarter handling
5142 for that kind of reg class, where a problem like this is normal.
5143 Perhaps those classes should be avoided for reloading
5144 by use of more alternatives. */
5146 int force_group = reload_nregs[r] > 1 && ! last_reload;
5148 /* If we want a single register and haven't yet found one,
5149 take any reg in the right class and not in use.
5150 If we want a consecutive group, here is where we look for it.
5152 We use two passes so we can first look for reload regs to
5153 reuse, which are already in use for other reloads in this insn,
5154 and only then use additional registers.
5155 I think that maximizing reuse is needed to make sure we don't
5156 run out of reload regs. Suppose we have three reloads, and
5157 reloads A and B can share regs. These need two regs.
5158 Suppose A and B are given different regs.
5159 That leaves none for C. */
5160 for (pass = 0; pass < 2; pass++)
5162 /* I is the index in spill_regs.
5163 We advance it round-robin between insns to use all spill regs
5164 equally, so that inherited reloads have a chance
5165 of leapfrogging each other. Don't do this, however, when we have
5166 group needs and failure would be fatal; if we only have a relatively
5167 small number of spill registers, and more than one of them has
5168 group needs, then by starting in the middle, we may end up
5169 allocating the first one in such a way that we are not left with
5170 sufficient groups to handle the rest. */
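/* As an illustration of that problem (hypothetical numbers): with four
   spill regs 0-3 and two reloads that each need a pair of consecutive
   registers, starting the scan at reg 1 hands the first reload regs 1-2
   and leaves only the non-consecutive regs 0 and 3 for the second,
   whereas starting at reg 0 yields the pairs 0-1 and 2-3.  */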
5172 if (noerror || ! force_group)
5173 i = last_spill_reg;
5174 else
5175 i = -1;
5177 for (count = 0; count < n_spills; count++)
5179 int class = (int) reload_reg_class[r];
5181 i = (i + 1) % n_spills;
5183 if ((reload_reg_free_p (spill_regs[i], reload_opnum[r],
5184 reload_when_needed[r])
5185 || (reload_in[r]
5186 /* We check reload_reg_used to make sure we
5187 don't clobber the return register. */
5188 && ! TEST_HARD_REG_BIT (reload_reg_used, spill_regs[i])
5189 && reload_reg_free_for_value_p (spill_regs[i],
5190 reload_opnum[r],
5191 reload_when_needed[r],
5192 reload_in[r],
5193 reload_out[r], r)))
5194 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5195 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5196 /* Look first for regs to share, then for unshared. But
5197 don't share regs used for inherited reloads; they are
5198 the ones we want to preserve. */
5199 && (pass
5200 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5201 spill_regs[i])
5202 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5203 spill_regs[i]))))
5205 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5206 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5207 (on 68000) got us two FP regs. If NR is 1,
5208 we would reject both of them. */
5209 if (force_group)
5210 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5211 /* If we need only one reg, we have already won. */
5212 if (nr == 1)
5214 /* But reject a single reg if we demand a group. */
5215 if (force_group)
5216 continue;
5217 break;
5219 /* Otherwise check that as many consecutive regs as we need
5220 are available here.
5221 Also, don't use for a group registers that are
5222 needed for nongroups. */
5223 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5224 while (nr > 1)
5226 regno = spill_regs[i] + nr - 1;
5227 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5228 && spill_reg_order[regno] >= 0
5229 && reload_reg_free_p (regno, reload_opnum[r],
5230 reload_when_needed[r])
5231 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5232 regno)))
5233 break;
5234 nr--;
5236 if (nr == 1)
5237 break;
5241 /* If we found something on pass 1, omit pass 2. */
5242 if (count < n_spills)
5243 break;
5246 /* We should have found a spill register by now. */
5247 if (count == n_spills)
5249 if (noerror)
5250 return 0;
5251 goto failure;
5254 /* I is the index in SPILL_REG_RTX of the reload register we are to
5255 allocate. Get an rtx for it and find its register number. */
5257 new = spill_reg_rtx[i];
5259 if (new == 0 || GET_MODE (new) != reload_mode[r])
5260 spill_reg_rtx[i] = new
5261 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
5263 regno = true_regnum (new);
5265 /* Detect when the reload reg can't hold the reload mode.
5266 This used to be one `if', but the Sequent compiler can't handle that. */
5267 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5269 enum machine_mode test_mode = VOIDmode;
5270 if (reload_in[r])
5271 test_mode = GET_MODE (reload_in[r]);
5272 /* If reload_in[r] has VOIDmode, it means we will load it
5273 in whatever mode the reload reg has: to wit, reload_mode[r].
5274 We have already tested that for validity. */
5275 /* Aside from that, we need to test that the expressions
5276 to reload from or into have modes which are valid for this
5277 reload register. Otherwise the reload insns would be invalid. */
5278 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5279 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5280 if (! (reload_out[r] != 0
5281 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5283 /* The reg is OK. */
5284 last_spill_reg = i;
5286 /* Mark as in use for this insn the reload regs we use
5287 for this. */
5288 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5289 reload_when_needed[r], reload_mode[r]);
5291 reload_reg_rtx[r] = new;
5292 reload_spill_index[r] = spill_regs[i];
5293 return 1;
5297 /* The reg is not OK. */
5298 if (noerror)
5299 return 0;
5301 failure:
5302 if (asm_noperands (PATTERN (insn)) < 0)
5303 /* It's the compiler's fault. */
5304 fatal_insn ("Could not find a spill register", insn);
5306 /* It's the user's fault; the operand's mode and constraint
5307 don't match. Disable this reload so we don't crash in final. */
5308 error_for_asm (insn,
5309 "`asm' operand constraint incompatible with operand size");
5310 reload_in[r] = 0;
5311 reload_out[r] = 0;
5312 reload_reg_rtx[r] = 0;
5313 reload_optional[r] = 1;
5314 reload_secondary_p[r] = 1;
5316 return 1;
5319 /* Assign hard reg targets for the pseudo-registers we must reload
5320 into hard regs for this insn.
5321 Also output the instructions to copy them in and out of the hard regs.
5323 For machines with register classes, we are responsible for
5324 finding a reload reg in the proper class. */
5326 static void
5327 choose_reload_regs (insn, avoid_return_reg)
5328 rtx insn;
5329 rtx avoid_return_reg;
5331 register int i, j;
5332 int max_group_size = 1;
5333 enum reg_class group_class = NO_REGS;
5334 int inheritance;
5336 rtx save_reload_reg_rtx[MAX_RELOADS];
5337 char save_reload_inherited[MAX_RELOADS];
5338 rtx save_reload_inheritance_insn[MAX_RELOADS];
5339 rtx save_reload_override_in[MAX_RELOADS];
5340 int save_reload_spill_index[MAX_RELOADS];
5341 HARD_REG_SET save_reload_reg_used;
5342 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5343 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5344 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5345 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5346 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5347 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5348 HARD_REG_SET save_reload_reg_used_in_op_addr;
5349 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5350 HARD_REG_SET save_reload_reg_used_in_insn;
5351 HARD_REG_SET save_reload_reg_used_in_other_addr;
5352 HARD_REG_SET save_reload_reg_used_at_all;
5354 bzero (reload_inherited, MAX_RELOADS);
5355 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5356 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5358 CLEAR_HARD_REG_SET (reload_reg_used);
5359 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5360 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5361 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5362 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5363 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5365 for (i = 0; i < reload_n_operands; i++)
5367 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5368 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5369 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5370 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5371 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5372 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5375 /* Don't bother with avoiding the return reg
5376 if we have no mandatory reload that could use it. */
5377 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5379 int do_avoid = 0;
5380 int regno = REGNO (avoid_return_reg);
5381 int nregs
5382 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5383 int r;
5385 for (r = regno; r < regno + nregs; r++)
5386 if (spill_reg_order[r] >= 0)
5387 for (j = 0; j < n_reloads; j++)
5388 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5389 && (reload_in[j] != 0 || reload_out[j] != 0
5390 || reload_secondary_p[j])
5391 &&
5392 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5393 do_avoid = 1;
5394 if (!do_avoid)
5395 avoid_return_reg = 0;
5398 #if 0 /* Not needed, now that we can always retry without inheritance. */
5399 /* See if we have more mandatory reloads than spill regs.
5400 If so, then we cannot risk optimizations that could prevent
5401 reloads from sharing one spill register.
5403 Since we will try finding a better register than reload_reg_rtx
5404 unless it is equal to reload_in or reload_out, count such reloads. */
5407 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
5408 for (j = 0; j < n_reloads; j++)
5409 if (! reload_optional[j]
5410 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5411 && (reload_reg_rtx[j] == 0
5412 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5413 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5414 tem++;
5415 if (tem > n_spills)
5416 must_reuse = 1;
5418 #endif
5420 /* Don't use the subroutine call return reg for a reload
5421 if we are supposed to avoid it. */
5422 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5424 int regno = REGNO (avoid_return_reg);
5425 int nregs
5426 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5427 int r;
5429 for (r = regno; r < regno + nregs; r++)
5430 if (spill_reg_order[r] >= 0)
5431 SET_HARD_REG_BIT (reload_reg_used, r);
5434 /* In order to be certain of getting the registers we need,
5435 we must sort the reloads into order of increasing register class.
5436 Then our grabbing of reload registers will parallel the process
5437 that provided the reload registers.
5439 Also note whether any of the reloads wants a consecutive group of regs.
5440 If so, record the maximum size of the group desired and what
5441 register class contains all the groups needed by this insn. */
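/* (reload_reg_class_lower, defined earlier in this file, also places
   required reloads ahead of optional ones, so mandatory reloads grab
   registers first.)  */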
5443 for (j = 0; j < n_reloads; j++)
5445 reload_order[j] = j;
5446 reload_spill_index[j] = -1;
5448 reload_mode[j]
5449 = (reload_inmode[j] == VOIDmode
5450 || (GET_MODE_SIZE (reload_outmode[j])
5451 > GET_MODE_SIZE (reload_inmode[j])))
5452 ? reload_outmode[j] : reload_inmode[j];
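/* E.g. an in-out reload with a QImode input and an SImode output gets
   SImode here, so the chosen register is wide enough for both sides.  */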
5454 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5456 if (reload_nregs[j] > 1)
5458 max_group_size = MAX (reload_nregs[j], max_group_size);
5459 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5462 /* If we have already decided to use a certain register,
5463 don't use it in another way. */
5464 if (reload_reg_rtx[j])
5465 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5466 reload_when_needed[j], reload_mode[j]);
5469 if (n_reloads > 1)
5470 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5472 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5473 sizeof reload_reg_rtx);
5474 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5475 bcopy ((char *) reload_inheritance_insn,
5476 (char *) save_reload_inheritance_insn,
5477 sizeof reload_inheritance_insn);
5478 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5479 sizeof reload_override_in);
5480 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5481 sizeof reload_spill_index);
5482 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5483 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5484 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5485 reload_reg_used_in_op_addr);
5487 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5488 reload_reg_used_in_op_addr_reload);
5490 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5491 reload_reg_used_in_insn);
5492 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5493 reload_reg_used_in_other_addr);
5495 for (i = 0; i < reload_n_operands; i++)
5497 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5498 reload_reg_used_in_output[i]);
5499 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5500 reload_reg_used_in_input[i]);
5501 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5502 reload_reg_used_in_input_addr[i]);
5503 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5504 reload_reg_used_in_inpaddr_addr[i]);
5505 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5506 reload_reg_used_in_output_addr[i]);
5507 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5508 reload_reg_used_in_outaddr_addr[i]);
5511 /* If -O, try first with inheritance, then turning it off.
5512 If not -O, don't do inheritance.
5513 Using inheritance when not optimizing leads to paradoxes
5514 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5515 because one side of the comparison might be inherited. */
5517 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5519 /* Process the reloads in order of preference just found.
5520 Beyond this point, subregs can be found in reload_reg_rtx.
5522 This used to look for an existing reloaded home for all
5523 of the reloads, and only then perform any new reloads.
5524 But that could lose if the reloads were done out of reg-class order
5525 because a later reload with a looser constraint might have an old
5526 home in a register needed by an earlier reload with a tighter constraint.
5528 To solve this, we make two passes over the reloads, in the order
5529 described above. In the first pass we try to inherit a reload
5530 from a previous insn. If there is a later reload that needs a
5531 class that is a proper subset of the class being processed, we must
5532 also allocate a spill register during the first pass.
5534 Then make a second pass over the reloads to allocate any reloads
5535 that haven't been given registers yet. */
5537 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5539 for (j = 0; j < n_reloads; j++)
5541 register int r = reload_order[j];
5543 /* Ignore reloads that got marked inoperative. */
5544 if (reload_out[r] == 0 && reload_in[r] == 0
5545 && ! reload_secondary_p[r])
5546 continue;
5548 /* If find_reloads chose to use reload_in or reload_out as a reload
5549 register, we don't need to choose one. Otherwise, try even if it
5550 found one since we might save an insn if we find the value lying
5551 around. */
5552 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5553 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5554 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5555 continue;
5557 #if 0 /* No longer needed for correct operation.
5558 It might give better code, or might not; worth an experiment? */
5559 /* If this is an optional reload, we can't inherit from earlier insns
5560 until we are sure that any non-optional reloads have been allocated.
5561 The following code takes advantage of the fact that optional reloads
5562 are at the end of reload_order. */
5563 if (reload_optional[r] != 0)
5564 for (i = 0; i < j; i++)
5565 if ((reload_out[reload_order[i]] != 0
5566 || reload_in[reload_order[i]] != 0
5567 || reload_secondary_p[reload_order[i]])
5568 && ! reload_optional[reload_order[i]]
5569 && reload_reg_rtx[reload_order[i]] == 0)
5570 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5571 #endif
5573 /* First see if this pseudo is already available as reloaded
5574 for a previous insn. We cannot try to inherit for reloads
5575 that are smaller than the maximum number of registers needed
5576 for groups unless the register we would allocate cannot be used
5577 for the groups.
5579 We could check here to see if this is a secondary reload for
5580 an object that is already in a register of the desired class.
5581 This would avoid the need for the secondary reload register.
5582 But this is complex because we can't easily determine what
5583 objects might want to be loaded via this reload. So let a
5584 register be allocated here. In `emit_reload_insns' we suppress
5585 one of the loads in the case described above. */
5587 if (inheritance)
5589 register int regno = -1;
5590 enum machine_mode mode;
5592 if (reload_in[r] == 0)
5593 ;
5594 else if (GET_CODE (reload_in[r]) == REG)
5596 regno = REGNO (reload_in[r]);
5597 mode = GET_MODE (reload_in[r]);
5599 else if (GET_CODE (reload_in_reg[r]) == REG)
5601 regno = REGNO (reload_in_reg[r]);
5602 mode = GET_MODE (reload_in_reg[r]);
5604 else if (GET_CODE (reload_in[r]) == MEM)
5606 rtx prev = prev_nonnote_insn (insn), note;
5608 if (prev && GET_CODE (prev) == INSN
5609 && GET_CODE (PATTERN (prev)) == USE
5610 && GET_CODE (XEXP (PATTERN (prev), 0)) == REG
5611 && (REGNO (XEXP (PATTERN (prev), 0))
5612 >= FIRST_PSEUDO_REGISTER)
5613 && (note = find_reg_note (prev, REG_EQUAL, NULL_RTX))
5614 && GET_CODE (XEXP (note, 0)) == MEM)
5616 rtx addr = XEXP (XEXP (note, 0), 0);
5617 int size_diff
5618 = (GET_MODE_SIZE (GET_MODE (addr))
5619 - GET_MODE_SIZE (GET_MODE (reload_in[r])));
5620 if (size_diff >= 0
5621 && rtx_equal_p ((BYTES_BIG_ENDIAN
5622 ? plus_constant (addr, size_diff)
5623 : addr),
5624 XEXP (reload_in[r], 0)))
5626 regno = REGNO (XEXP (PATTERN (prev), 0));
5627 mode = GET_MODE (reload_in[r]);
5631 #if 0
5632 /* This won't work, since REGNO can be a pseudo reg number.
5633 Also, it takes much more hair to keep track of all the things
5634 that can invalidate an inherited reload of part of a pseudoreg. */
5635 else if (GET_CODE (reload_in[r]) == SUBREG
5636 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5637 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5638 #endif
5640 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5642 i = REGNO (reg_last_reload_reg[regno]);
5644 if (reg_reloaded_contents[i] == regno
5645 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5646 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5647 >= GET_MODE_SIZE (mode))
5648 && HARD_REGNO_MODE_OK (i, reload_mode[r])
5649 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5650 i)
5651 && (reload_nregs[r] == max_group_size
5652 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5653 i))
5654 && ((reload_reg_free_p (i, reload_opnum[r],
5655 reload_when_needed[r])
5656 && reload_reg_free_before_p (i, reload_opnum[r],
5657 reload_when_needed[r]))
5658 || reload_reg_free_for_value_p (i, reload_opnum[r],
5659 reload_when_needed[r],
5660 reload_in[r],
5661 reload_out[r], r)))
5663 /* If a group is needed, verify that all the subsequent
5664 registers still have their values intact. */
5665 int nr
5666 = HARD_REGNO_NREGS (i, reload_mode[r]);
5667 int k;
5669 for (k = 1; k < nr; k++)
5670 if (reg_reloaded_contents[i + k] != regno
5671 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5672 break;
5674 if (k == nr)
5676 int i1;
5678 /* We found a register that contains the
5679 value we need. If this register is the
5680 same as an `earlyclobber' operand of the
5681 current insn, just mark it as a place to
5682 reload from since we can't use it as the
5683 reload register itself. */
5685 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5686 if (reg_overlap_mentioned_for_reload_p
5687 (reg_last_reload_reg[regno],
5688 reload_earlyclobbers[i1]))
5689 break;
5691 if (i1 != n_earlyclobbers
5692 /* Don't use it if we'd clobber a pseudo reg. */
5693 || (spill_reg_order[i] < 0
5694 && reload_out[r]
5695 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5696 /* Don't really use the inherited spill reg
5697 if we need it wider than we've got it. */
5698 || (GET_MODE_SIZE (reload_mode[r])
5699 > GET_MODE_SIZE (mode)))
5700 reload_override_in[r] = reg_last_reload_reg[regno];
5701 else
5703 int k;
5704 /* We can use this as a reload reg. */
5705 /* Mark the register as in use for this part of
5706 the insn. */
5707 mark_reload_reg_in_use (i,
5708 reload_opnum[r],
5709 reload_when_needed[r],
5710 reload_mode[r]);
5711 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5712 reload_inherited[r] = 1;
5713 reload_inheritance_insn[r]
5714 = reg_reloaded_insn[i];
5715 reload_spill_index[r] = i;
5716 for (k = 0; k < nr; k++)
5717 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5718 i + k);
5725 /* Here's another way to see if the value is already lying around. */
5726 if (inheritance
5727 && reload_in[r] != 0
5728 && ! reload_inherited[r]
5729 && reload_out[r] == 0
5730 && (CONSTANT_P (reload_in[r])
5731 || GET_CODE (reload_in[r]) == PLUS
5732 || GET_CODE (reload_in[r]) == REG
5733 || GET_CODE (reload_in[r]) == MEM)
5734 && (reload_nregs[r] == max_group_size
5735 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5737 register rtx equiv
5738 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5739 -1, NULL_PTR, 0, reload_mode[r]);
5740 int regno;
5742 if (equiv != 0)
5744 if (GET_CODE (equiv) == REG)
5745 regno = REGNO (equiv);
5746 else if (GET_CODE (equiv) == SUBREG)
5748 /* This must be a SUBREG of a hard register.
5749 Make a new REG since this might be used in an
5750 address and not all machines support SUBREGs
5751 there. */
5752 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5753 equiv = gen_rtx_REG (reload_mode[r], regno);
5755 else
5756 abort ();
5759 /* If we found a spill reg, reject it unless it is free
5760 and of the desired class. */
5761 if (equiv != 0
5762 && ((spill_reg_order[regno] >= 0
5763 && ! (reload_reg_free_before_p (regno, reload_opnum[r],
5764 reload_when_needed[r])
5765 || reload_reg_free_for_value_p (regno,
5766 reload_opnum[r],
5767 reload_when_needed[r],
5768 reload_in[r],
5769 reload_out[r], r)))
5770 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5771 regno)))
5772 equiv = 0;
5774 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5775 equiv = 0;
5777 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5778 equiv = 0;
5780 /* We found a register that contains the value we need.
5781 If this register is the same as an `earlyclobber' operand
5782 of the current insn, just mark it as a place to reload from
5783 since we can't use it as the reload register itself. */
5785 if (equiv != 0)
5786 for (i = 0; i < n_earlyclobbers; i++)
5787 if (reg_overlap_mentioned_for_reload_p (equiv,
5788 reload_earlyclobbers[i]))
5790 reload_override_in[r] = equiv;
5791 equiv = 0;
5792 break;
5795 /* JRV: If the equiv register we have found is
5796 explicitly clobbered in the current insn, mark but
5797 don't use, as above. */
5799 if (equiv != 0 && regno_clobbered_p (regno, insn))
5801 reload_override_in[r] = equiv;
5802 equiv = 0;
5805 /* If we found an equivalent reg, say no code need be generated
5806 to load it, and use it as our reload reg. */
5807 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5809 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5810 int k;
5811 reload_reg_rtx[r] = equiv;
5812 reload_inherited[r] = 1;
5814 /* If any of the hard registers in EQUIV are spill
5815 registers, mark them as in use for this insn. */
5816 for (k = 0; k < nr; k++)
5818 i = spill_reg_order[regno + k];
5819 if (i >= 0)
5821 mark_reload_reg_in_use (regno, reload_opnum[r],
5822 reload_when_needed[r],
5823 reload_mode[r]);
5824 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5825 regno + k);
5831 /* If we found a register to use already, or if this is an optional
5832 reload, we are done. */
5833 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5834 continue;
5836 #if 0 /* No longer needed for correct operation. Might or might not
5837 give better code on the average. Want to experiment? */
5839 /* See if there is a later reload that has a class different from our
5840 class that intersects our class or that requires less register
5841 than our reload. If so, we must allocate a register to this
5842 reload now, since that reload might inherit a previous reload
5843 and take the only available register in our class. Don't do this
5844 for optional reloads since they will force all previous reloads
5845 to be allocated. Also don't do this for reloads that have been
5846 turned off. */
5848 for (i = j + 1; i < n_reloads; i++)
5850 int s = reload_order[i];
5852 if ((reload_in[s] == 0 && reload_out[s] == 0
5853 && ! reload_secondary_p[s])
5854 || reload_optional[s])
5855 continue;
5857 if ((reload_reg_class[s] != reload_reg_class[r]
5858 && reg_classes_intersect_p (reload_reg_class[r],
5859 reload_reg_class[s]))
5860 || reload_nregs[s] < reload_nregs[r])
5861 break;
5864 if (i == n_reloads)
5865 continue;
5867 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5868 #endif
5871 /* Now allocate reload registers for anything non-optional that
5872 didn't get one yet. */
5873 for (j = 0; j < n_reloads; j++)
5875 register int r = reload_order[j];
5877 /* Ignore reloads that got marked inoperative. */
5878 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5879 continue;
5881 /* Skip reloads that already have a register allocated or are
5882 optional. */
5883 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5884 continue;
5886 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5887 break;
5890 /* If that loop got all the way, we have won. */
5891 if (j == n_reloads)
5892 break;
5894 fail:
5895 /* Loop around and try without any inheritance. */
5896 /* First undo everything done by the failed attempt
5897 to allocate with inheritance. */
5898 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5899 sizeof reload_reg_rtx);
5900 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5901 sizeof reload_inherited);
5902 bcopy ((char *) save_reload_inheritance_insn,
5903 (char *) reload_inheritance_insn,
5904 sizeof reload_inheritance_insn);
5905 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5906 sizeof reload_override_in);
5907 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5908 sizeof reload_spill_index);
5909 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5910 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5911 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5912 save_reload_reg_used_in_op_addr);
5913 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5914 save_reload_reg_used_in_op_addr_reload);
5915 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5916 save_reload_reg_used_in_insn);
5917 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5918 save_reload_reg_used_in_other_addr);
5920 for (i = 0; i < reload_n_operands; i++)
5922 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5923 save_reload_reg_used_in_input[i]);
5924 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5925 save_reload_reg_used_in_output[i]);
5926 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5927 save_reload_reg_used_in_input_addr[i]);
5928 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5929 save_reload_reg_used_in_inpaddr_addr[i]);
5930 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5931 save_reload_reg_used_in_output_addr[i]);
5932 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5933 save_reload_reg_used_in_outaddr_addr[i]);
5937 /* If we thought we could inherit a reload, because it seemed that
5938 nothing else wanted the same reload register earlier in the insn,
5939 verify that assumption, now that all reloads have been assigned. */
5941 for (j = 0; j < n_reloads; j++)
5943 register int r = reload_order[j];
5945 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5946 && ! (reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5947 reload_opnum[r],
5948 reload_when_needed[r])
5949 || reload_reg_free_for_value_p (true_regnum (reload_reg_rtx[r]),
5950 reload_opnum[r],
5951 reload_when_needed[r],
5952 reload_in[r],
5953 reload_out[r], r)))
5954 reload_inherited[r] = 0;
5955 /* If we can inherit a RELOAD_FOR_INPUT, then we do not need its related
5956 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads.
5957 ??? This could be extended to other reload types, but these are
5958 more tricky to handle:
5959 RELOAD_FOR_OTHER_ADDRESS reloads might have been merged, so we
5960 can't eliminate them without a check that *all* references are
5961 now unused due to inheritance.
5962 While RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_OUTADDR_ADDRESS are
5963 not merged, we can't be sure that we have eliminated the use of
5964 that particular reload if we have seen just one
5965 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS being inherited,
5966 since there might be multiple of the latter two reloads for a single
5967 operand.
5968 RELOAD_FOR_OPADDR_ADDR reloads for different operands are not
5969 merged, but might share the same register by courtesy of
5970 reload_reg_free_for_value_p. reload_reg_used_in_op_addr_reload
5971 does not differentiate by opnum, thus calling clear_reload_reg_in_use
5972 for one of these reloads would mark the register as free even though
5973 another RELOAD_FOR_OPADDR_ADDR reload might still use it. */
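/* For example, if the RELOAD_FOR_INPUT of operand 1, say
   (mem:SI (reg 65)), is inherited, the address (reg 65) no longer needs
   to be loaded into a base register for that operand, so that operand's
   RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads are
   cancelled below.  */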
5974 else if (reload_inherited[r] && reload_when_needed[r] == RELOAD_FOR_INPUT)
5976 for (i = 0; i < n_reloads; i++)
5978 if ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5979 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
5980 && reload_opnum[i] == reload_opnum[r]
5981 && reload_in[i] && reload_reg_rtx[i])
5983 int regno = true_regnum (reload_reg_rtx[i]);
5985 reload_in[i] = 0;
5986 if (spill_reg_order[regno] >= 0)
5987 clear_reload_reg_in_use (regno, reload_opnum[i],
5988 reload_when_needed[i],
5989 reload_mode[i]);
5990 reload_reg_rtx[i] = 0;
5991 reload_spill_index[i] = -1;
5992 remove_replacements (i);
5997 /* If we found a better place to reload from,
5998 validate it in the same fashion, if it is a reload reg. */
5999 if (reload_override_in[r]
6000 && (GET_CODE (reload_override_in[r]) == REG
6001 || GET_CODE (reload_override_in[r]) == SUBREG))
6003 int regno = true_regnum (reload_override_in[r]);
6004 if (spill_reg_order[regno] >= 0
6005 && ! reload_reg_free_before_p (regno, reload_opnum[r],
6006 reload_when_needed[r]))
6007 reload_override_in[r] = 0;
6011 /* Now that reload_override_in is known valid,
6012 actually override reload_in. */
6013 for (j = 0; j < n_reloads; j++)
6014 if (reload_override_in[j])
6015 reload_in[j] = reload_override_in[j];
6017 /* If this reload won't be done because it has been cancelled or is
6018 optional and not inherited, clear reload_reg_rtx so other
6019 routines (such as subst_reloads) don't get confused. */
6020 for (j = 0; j < n_reloads; j++)
6021 if (reload_reg_rtx[j] != 0
6022 && ((reload_optional[j] && ! reload_inherited[j])
6023 || (reload_in[j] == 0 && reload_out[j] == 0
6024 && ! reload_secondary_p[j])))
6026 int regno = true_regnum (reload_reg_rtx[j]);
6028 if (spill_reg_order[regno] >= 0)
6029 clear_reload_reg_in_use (regno, reload_opnum[j],
6030 reload_when_needed[j], reload_mode[j]);
6031 reload_reg_rtx[j] = 0;
6034 /* Record which pseudos and which spill regs have output reloads. */
6035 for (j = 0; j < n_reloads; j++)
6037 register int r = reload_order[j];
6039 i = reload_spill_index[r];
6041 /* I is nonneg if this reload uses a register.
6042 If reload_reg_rtx[r] is 0, this is an optional reload
6043 that we opted to ignore. */
6044 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
6045 && reload_reg_rtx[r] != 0)
6047 register int nregno = REGNO (reload_out[r]);
6048 int nr = 1;
6050 if (nregno < FIRST_PSEUDO_REGISTER)
6051 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
6053 while (--nr >= 0)
6054 reg_has_output_reload[nregno + nr] = 1;
6056 if (i >= 0)
6058 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
6059 while (--nr >= 0)
6060 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6063 if (reload_when_needed[r] != RELOAD_OTHER
6064 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6065 && reload_when_needed[r] != RELOAD_FOR_INSN)
6066 abort ();
6071 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6072 reloads of the same item for fear that we might not have enough reload
6073 registers. However, normally they will get the same reload register
6074 and hence actually need not be loaded twice.
6076 Here we check for the most common case of this phenomenon: when we have
6077 a number of reloads for the same object, each of which was allocated
6078 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6079 reload, and is not modified in the insn itself. If we find such,
6080 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6081 This will not increase the number of spill registers needed and will
6082 prevent redundant code. */
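/* For instance, an insn that uses the same pseudo in two different
   operands may get two separate RELOAD_FOR_INPUT reloads of that pseudo
   when SMALL_REGISTER_CLASSES is set; if both were assigned the same
   reload register, the code below folds them into a single RELOAD_OTHER
   reload.  */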
6084 static void
6085 merge_assigned_reloads (insn)
6086 rtx insn;
6088 int i, j;
6090 /* Scan all the reloads looking for ones that only load values and
6091 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6092 assigned and not modified by INSN. */
6094 for (i = 0; i < n_reloads; i++)
6096 int conflicting_input = 0;
6097 int max_input_address_opnum = -1;
6098 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6100 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6101 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6102 || reg_set_p (reload_reg_rtx[i], insn))
6103 continue;
6105 /* Look at all other reloads. Ensure that the only use of this
6106 reload_reg_rtx is in a reload that just loads the same value
6107 as we do. Note that any secondary reloads must be of the identical
6108 class since the values, modes, and result registers are the
6109 same, so we need not do anything with any secondary reloads. */
6111 for (j = 0; j < n_reloads; j++)
6113 if (i == j || reload_reg_rtx[j] == 0
6114 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6115 reload_reg_rtx[i]))
6116 continue;
6118 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6119 && reload_opnum[j] > max_input_address_opnum)
6120 max_input_address_opnum = reload_opnum[j];
6122 /* If the reload regs aren't exactly the same (e.g., different modes)
6123 or if the values are different, we can't merge this reload.
6124 But if it is an input reload, we might still merge
6125 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6127 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6128 || reload_out[j] != 0 || reload_in[j] == 0
6129 || ! rtx_equal_p (reload_in[i], reload_in[j]))
6131 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6132 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6133 || reload_opnum[i] > reload_opnum[j])
6134 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6135 break;
6136 conflicting_input = 1;
6137 if (min_conflicting_input_opnum > reload_opnum[j])
6138 min_conflicting_input_opnum = reload_opnum[j];
6142 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6143 we, in fact, found any matching reloads. */
6145 if (j == n_reloads
6146 && max_input_address_opnum <= min_conflicting_input_opnum)
6148 for (j = 0; j < n_reloads; j++)
6149 if (i != j && reload_reg_rtx[j] != 0
6150 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6151 && (! conflicting_input
6152 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6153 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
6155 reload_when_needed[i] = RELOAD_OTHER;
6156 reload_in[j] = 0;
6157 reload_spill_index[j] = -1;
6158 transfer_replacements (i, j);
6161 /* If this is now RELOAD_OTHER, look for any reloads that load
6162 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6163 if they were for inputs, RELOAD_OTHER for outputs. Note that
6164 this test is equivalent to looking for reloads for this operand
6165 number. */
6167 if (reload_when_needed[i] == RELOAD_OTHER)
6168 for (j = 0; j < n_reloads; j++)
6169 if (reload_in[j] != 0
6170 && reload_when_needed[j] != RELOAD_OTHER
6171 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6172 reload_in[i]))
6173 reload_when_needed[j]
6174 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6175 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
6176 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6182 /* Output insns to reload values in and out of the chosen reload regs. */
6184 static void
6185 emit_reload_insns (insn)
6186 rtx insn;
6188 register int j;
6189 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6190 rtx other_input_address_reload_insns = 0;
6191 rtx other_input_reload_insns = 0;
6192 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6193 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6194 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6195 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6196 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6197 rtx operand_reload_insns = 0;
6198 rtx other_operand_reload_insns = 0;
6199 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6200 rtx following_insn = NEXT_INSN (insn);
6201 rtx before_insn = insn;
6202 int special;
6203 /* Values to be put in spill_reg_store are put here first. */
6204 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6205 HARD_REG_SET reg_reloaded_died;
6207 CLEAR_HARD_REG_SET (reg_reloaded_died);
6209 for (j = 0; j < reload_n_operands; j++)
6210 input_reload_insns[j] = input_address_reload_insns[j]
6211 = inpaddr_address_reload_insns[j]
6212 = output_reload_insns[j] = output_address_reload_insns[j]
6213 = outaddr_address_reload_insns[j]
6214 = other_output_reload_insns[j] = 0;
6216 /* Now output the instructions to copy the data into and out of the
6217 reload registers. Do these in the order that the reloads were reported,
6218 since reloads of base and index registers precede reloads of operands
6219 and the operands may need the base and index registers reloaded. */
6221 for (j = 0; j < n_reloads; j++)
6223 register rtx old;
6224 rtx oldequiv_reg = 0;
6225 rtx this_reload_insn = 0;
6226 int expect_occurrences = 1;
6228 if (reload_spill_index[j] >= 0)
6229 new_spill_reg_store[reload_spill_index[j]] = 0;
6231 old = reload_in[j];
6232 if (old != 0 && ! reload_inherited[j]
6233 && ! rtx_equal_p (reload_reg_rtx[j], old)
6234 && reload_reg_rtx[j] != 0)
6236 register rtx reloadreg = reload_reg_rtx[j];
6237 rtx oldequiv = 0;
6238 enum machine_mode mode;
6239 rtx *where;
6241 /* Determine the mode to reload in.
6242 This is very tricky because we have three to choose from.
6243 There is the mode the insn operand wants (reload_inmode[J]).
6244 There is the mode of the reload register RELOADREG.
6245 There is the intrinsic mode of the operand, which we could find
6246 by stripping some SUBREGs.
6247 It turns out that RELOADREG's mode is irrelevant:
6248 we can change that arbitrarily.
6250 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6251 then the reload reg may not support QImode moves, so use SImode.
6252 If foo is in memory due to spilling a pseudo reg, this is safe,
6253 because the QImode value is in the least significant part of a
6254 slot big enough for a SImode. If foo is some other sort of
6255 memory reference, then it is impossible to reload this case,
6256 so previous passes had better make sure this never happens.
6258 Then consider a one-word union which has SImode and one of its
6259 members is a float, being fetched as (SUBREG:SF union:SI).
6260 We must fetch that as SFmode because we could be loading into
6261 a float-only register. In this case OLD's mode is correct.
6263 Consider an immediate integer: it has VOIDmode. Here we need
6264 to get a mode from something else.
6266 In some cases, there is a fourth mode, the operand's
6267 containing mode. If the insn specifies a containing mode for
6268 this operand, it overrides all others.
6270 I am not sure whether the algorithm here is always right,
6271 but it does the right things in those cases. */
6273 mode = GET_MODE (old);
6274 if (mode == VOIDmode)
6275 mode = reload_inmode[j];
6277 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6278 /* If we need a secondary register for this operation, see if
6279 the value is already in a register in that class. Don't
6280 do this if the secondary register will be used as a scratch
6281 register. */
6283 if (reload_secondary_in_reload[j] >= 0
6284 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6285 && optimize)
6286 oldequiv
6287 = find_equiv_reg (old, insn,
6288 reload_reg_class[reload_secondary_in_reload[j]],
6289 -1, NULL_PTR, 0, mode);
6290 #endif
6292 /* If reloading from memory, see if there is a register
6293 that already holds the same value. If so, reload from there.
6294 We can pass 0 as the reload_reg_p argument because
6295 any other reload has either already been emitted,
6296 in which case find_equiv_reg will see the reload-insn,
6297 or has yet to be emitted, in which case it doesn't matter
6298 because we will use this equiv reg right away. */
6300 if (oldequiv == 0 && optimize
6301 && (GET_CODE (old) == MEM
6302 || (GET_CODE (old) == REG
6303 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6304 && reg_renumber[REGNO (old)] < 0)))
6305 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6306 -1, NULL_PTR, 0, mode);
6308 if (oldequiv)
6310 int regno = true_regnum (oldequiv);
6312 /* If OLDEQUIV is a spill register, don't use it for this
6313 if any other reload needs it at an earlier stage of this insn
6314 or at this stage. */
6315 if (spill_reg_order[regno] >= 0
6316 && (! reload_reg_free_p (regno, reload_opnum[j],
6317 reload_when_needed[j])
6318 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6319 reload_when_needed[j])))
6320 oldequiv = 0;
6322 /* If OLDEQUIV is not a spill register,
6323 don't use it if any other reload wants it. */
6324 if (spill_reg_order[regno] < 0)
6326 int k;
6327 for (k = 0; k < n_reloads; k++)
6328 if (reload_reg_rtx[k] != 0 && k != j
6329 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6330 oldequiv))
6332 oldequiv = 0;
6333 break;
6337 /* If it is no cheaper to copy from OLDEQUIV into the
6338 reload register than it would be to move from memory,
6339 don't use it. Likewise, if we need a secondary register
6340 or memory. */
6342 if (oldequiv != 0
6343 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6344 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6345 reload_reg_class[j])
6346 >= MEMORY_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6347 1)))
6348 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6349 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6350 mode, oldequiv)
6351 != NO_REGS)
6352 #endif
6353 #ifdef SECONDARY_MEMORY_NEEDED
6354 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6355 REGNO_REG_CLASS (regno),
6356 mode)
6357 #endif
6358 ))
6359 oldequiv = 0;
6362 if (oldequiv == 0)
6363 oldequiv = old;
6364 else if (GET_CODE (oldequiv) == REG)
6365 oldequiv_reg = oldequiv;
6366 else if (GET_CODE (oldequiv) == SUBREG)
6367 oldequiv_reg = SUBREG_REG (oldequiv);
6369 /* If we are reloading from a register that was recently stored in
6370 with an output-reload, see if we can prove there was
6371 actually no need to store the old value in it. */
6373 if (optimize && GET_CODE (oldequiv) == REG
6374 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6375 && spill_reg_store[REGNO (oldequiv)]
6376 && GET_CODE (old) == REG && dead_or_set_p (insn, old)
6377 /* This is unsafe if operand occurs more than once in current
6378 insn. Perhaps some occurrences weren't reloaded. */
6379 && count_occurrences (PATTERN (insn), old) == 1)
6380 delete_output_reload (insn, j, spill_reg_store[REGNO (oldequiv)]);
6382 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6383 then load RELOADREG from OLDEQUIV. Note that we cannot use
6384 gen_lowpart_common since it can do the wrong thing when
6385 RELOADREG has a multi-word mode. Note that RELOADREG
6386 must always be a REG here. */
6388 if (GET_MODE (reloadreg) != mode)
6389 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6390 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6391 oldequiv = SUBREG_REG (oldequiv);
6392 if (GET_MODE (oldequiv) != VOIDmode
6393 && mode != GET_MODE (oldequiv))
6394 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
6396 /* Switch to the right place to emit the reload insns. */
6397 switch (reload_when_needed[j])
6399 case RELOAD_OTHER:
6400 where = &other_input_reload_insns;
6401 break;
6402 case RELOAD_FOR_INPUT:
6403 where = &input_reload_insns[reload_opnum[j]];
6404 break;
6405 case RELOAD_FOR_INPUT_ADDRESS:
6406 where = &input_address_reload_insns[reload_opnum[j]];
6407 break;
6408 case RELOAD_FOR_INPADDR_ADDRESS:
6409 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6410 break;
6411 case RELOAD_FOR_OUTPUT_ADDRESS:
6412 where = &output_address_reload_insns[reload_opnum[j]];
6413 break;
6414 case RELOAD_FOR_OUTADDR_ADDRESS:
6415 where = &outaddr_address_reload_insns[reload_opnum[j]];
6416 break;
6417 case RELOAD_FOR_OPERAND_ADDRESS:
6418 where = &operand_reload_insns;
6419 break;
6420 case RELOAD_FOR_OPADDR_ADDR:
6421 where = &other_operand_reload_insns;
6422 break;
6423 case RELOAD_FOR_OTHER_ADDRESS:
6424 where = &other_input_address_reload_insns;
6425 break;
6426 default:
6427 abort ();
6430 push_to_sequence (*where);
6431 special = 0;
6433 /* Auto-increment addresses must be reloaded in a special way. */
6434 if (GET_CODE (oldequiv) == POST_INC
6435 || GET_CODE (oldequiv) == POST_DEC
6436 || GET_CODE (oldequiv) == PRE_INC
6437 || GET_CODE (oldequiv) == PRE_DEC)
6439 /* We are not going to bother supporting the case where an
6440 incremented register can't be copied directly from
6441 OLDEQUIV since this seems highly unlikely. */
6442 if (reload_secondary_in_reload[j] >= 0)
6443 abort ();
6444 /* Prevent normal processing of this reload. */
6445 special = 1;
6446 /* Output a special code sequence for this case. */
6447 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6450 /* If we are reloading a pseudo-register that was set by the previous
6451 insn, see if we can get rid of that pseudo-register entirely
6452 by redirecting the previous insn into our reload register. */
6454 else if (optimize && GET_CODE (old) == REG
6455 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6456 && dead_or_set_p (insn, old)
6457 /* This is unsafe if some other reload
6458 uses the same reg first. */
6459 && reload_reg_free_before_p (REGNO (reloadreg),
6460 reload_opnum[j],
6461 reload_when_needed[j]))
6463 rtx temp = PREV_INSN (insn);
6464 while (temp && GET_CODE (temp) == NOTE)
6465 temp = PREV_INSN (temp);
6466 if (temp
6467 && GET_CODE (temp) == INSN
6468 && GET_CODE (PATTERN (temp)) == SET
6469 && SET_DEST (PATTERN (temp)) == old
6470 /* Make sure we can access insn_operand_constraint. */
6471 && asm_noperands (PATTERN (temp)) < 0
6472 /* This is unsafe if prev insn rejects our reload reg. */
6473 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6474 reloadreg)
6475 /* This is unsafe if operand occurs more than once in current
6476 insn. Perhaps some occurrences aren't reloaded. */
6477 && count_occurrences (PATTERN (insn), old) == 1
6478 /* Don't risk splitting a matching pair of operands. */
6479 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6481 /* Store into the reload register instead of the pseudo. */
6482 SET_DEST (PATTERN (temp)) = reloadreg;
6483 /* If these are the only uses of the pseudo reg,
6484 pretend for GDB it lives in the reload reg we used. */
6485 if (REG_N_DEATHS (REGNO (old)) == 1
6486 && REG_N_SETS (REGNO (old)) == 1)
6488 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6489 alter_reg (REGNO (old), -1);
6491 special = 1;
6495 /* We can't do that, so output an insn to load RELOADREG. */
6497 if (! special)
6499 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6500 rtx second_reload_reg = 0;
6501 enum insn_code icode;
6503 /* If we have a secondary reload, pick up the secondary register
6504 and icode, if any. If OLDEQUIV and OLD are different or
6505 if this is an in-out reload, recompute whether or not we
6506 still need a secondary register and what the icode should
6507 be. If we still need a secondary register and the class or
6508 icode is different, go back to reloading from OLD if using
6509 OLDEQUIV means that we got the wrong type of register. We
6510 cannot have different class or icode due to an in-out reload
6511 because we don't make such reloads when both the input and
6512 output need secondary reload registers. */
6514 if (reload_secondary_in_reload[j] >= 0)
6516 int secondary_reload = reload_secondary_in_reload[j];
6517 rtx real_oldequiv = oldequiv;
6518 rtx real_old = old;
6520 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6521 and similarly for OLD.
6522 See comments in get_secondary_reload in reload.c. */
6523 if (GET_CODE (oldequiv) == REG
6524 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6525 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6526 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6528 if (GET_CODE (old) == REG
6529 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6530 && reg_equiv_mem[REGNO (old)] != 0)
6531 real_old = reg_equiv_mem[REGNO (old)];
6533 second_reload_reg = reload_reg_rtx[secondary_reload];
6534 icode = reload_secondary_in_icode[j];
6536 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6537 || (reload_in[j] != 0 && reload_out[j] != 0))
6539 enum reg_class new_class
6540 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6541 mode, real_oldequiv);
6543 if (new_class == NO_REGS)
6544 second_reload_reg = 0;
6545 else
6547 enum insn_code new_icode;
6548 enum machine_mode new_mode;
6550 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6551 REGNO (second_reload_reg)))
6552 oldequiv = old, real_oldequiv = real_old;
6553 else
6555 new_icode = reload_in_optab[(int) mode];
6556 if (new_icode != CODE_FOR_nothing
6557 && ((insn_operand_predicate[(int) new_icode][0]
6558 && ! ((*insn_operand_predicate[(int) new_icode][0])
6559 (reloadreg, mode)))
6560 || (insn_operand_predicate[(int) new_icode][1]
6561 && ! ((*insn_operand_predicate[(int) new_icode][1])
6562 (real_oldequiv, mode)))))
6563 new_icode = CODE_FOR_nothing;
6565 if (new_icode == CODE_FOR_nothing)
6566 new_mode = mode;
6567 else
6568 new_mode = insn_operand_mode[(int) new_icode][2];
6570 if (GET_MODE (second_reload_reg) != new_mode)
6572 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6573 new_mode))
6574 oldequiv = old, real_oldequiv = real_old;
6575 else
6576 second_reload_reg
6577 = gen_rtx_REG (new_mode,
6578 REGNO (second_reload_reg));
6584 /* If we still need a secondary reload register, check
6585 to see if it is being used as a scratch or intermediate
6586 register and generate code appropriately. If we need
6587 a scratch register, use REAL_OLDEQUIV since the form of
6588 the insn may depend on the actual address if it is
6589 a MEM. */
6591 if (second_reload_reg)
6593 if (icode != CODE_FOR_nothing)
6595 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6596 second_reload_reg));
6597 special = 1;
6599 else
6601 /* See if we need a scratch register to load the
6602 intermediate register (a tertiary reload). */
6603 enum insn_code tertiary_icode
6604 = reload_secondary_in_icode[secondary_reload];
6606 if (tertiary_icode != CODE_FOR_nothing)
6608 rtx third_reload_reg
6609 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6611 emit_insn ((GEN_FCN (tertiary_icode)
6612 (second_reload_reg, real_oldequiv,
6613 third_reload_reg)));
6615 else
6616 gen_reload (second_reload_reg, oldequiv,
6617 reload_opnum[j],
6618 reload_when_needed[j]);
6620 oldequiv = second_reload_reg;
6624 #endif
6626 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6627 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6628 reload_when_needed[j]);
6630 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6631 /* We may have to make a REG_DEAD note for the secondary reload
6632 register in the insns we just made. Find the last insn that
6633 mentioned the register. */
6634 if (! special && second_reload_reg
6635 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6637 rtx prev;
6639 for (prev = get_last_insn (); prev;
6640 prev = PREV_INSN (prev))
6641 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6642 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6643 PATTERN (prev)))
6645 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
6646 second_reload_reg,
6647 REG_NOTES (prev));
6648 break;
6651 #endif
6654 this_reload_insn = get_last_insn ();
6655 /* End this sequence. */
6656 *where = get_insns ();
6657 end_sequence ();
6660 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6661 e.g. inheriting a SImode output reload for
6662 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6663 if (optimize && reload_inherited[j] && reload_in[j]
6664 && GET_CODE (reload_in[j]) == MEM
6665 && reload_spill_index[j] >= 0
6666 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6668 expect_occurrences
6669 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6670 reload_in[j]
6671 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6673 /* Add a note saying the input reload reg
6674 dies in this insn, if anyone cares. */
6675 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6676 if (old != 0
6677 && reload_reg_rtx[j] != old
6678 && reload_reg_rtx[j] != 0
6679 && reload_out[j] == 0
6680 && ! reload_inherited[j]
6681 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6683 register rtx reloadreg = reload_reg_rtx[j];
6685 #if 0
6686 /* We can't abort here because we need to support this for sched.c.
6687 It's not terrible to miss a REG_DEAD note, but we should try
6688 to figure out how to do this correctly. */
6689 /* The code below is incorrect for address-only reloads. */
6690 if (reload_when_needed[j] != RELOAD_OTHER
6691 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6692 abort ();
6693 #endif
6695 /* Add a death note to this insn, for an input reload. */
6697 if ((reload_when_needed[j] == RELOAD_OTHER
6698 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6699 && ! dead_or_set_p (insn, reloadreg))
6700 REG_NOTES (insn)
6701 = gen_rtx_EXPR_LIST (REG_DEAD,
6702 reloadreg, REG_NOTES (insn));
6705 /* When we inherit a reload, the last marked death of the reload reg
6706 may no longer really be a death. */
6707 if (reload_reg_rtx[j] != 0
6708 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6709 && reload_inherited[j])
6711 /* Handle inheriting an output reload.
6712 Remove the death note from the output reload insn. */
6713 if (reload_spill_index[j] >= 0
6714 && GET_CODE (reload_in[j]) == REG
6715 && spill_reg_store[reload_spill_index[j]] != 0
6716 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6717 REG_DEAD, REGNO (reload_reg_rtx[j])))
6718 remove_death (REGNO (reload_reg_rtx[j]),
6719 spill_reg_store[reload_spill_index[j]]);
6720 /* Likewise for input reloads that were inherited. */
6721 else if (reload_spill_index[j] >= 0
6722 && GET_CODE (reload_in[j]) == REG
6723 && spill_reg_store[reload_spill_index[j]] == 0
6724 && reload_inheritance_insn[j] != 0
6725 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6726 REGNO (reload_reg_rtx[j])))
6727 remove_death (REGNO (reload_reg_rtx[j]),
6728 reload_inheritance_insn[j]);
6729 else
6731 rtx prev;
6733 /* We got this register from find_equiv_reg.
6734 Search back for its last death note and get rid of it.
6735 But don't search back too far.
6736 Don't go past a place where this reg is set,
6737 since a death note before that remains valid. */
6738 for (prev = PREV_INSN (insn);
6739 prev && GET_CODE (prev) != CODE_LABEL;
6740 prev = PREV_INSN (prev))
6741 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6742 && dead_or_set_p (prev, reload_reg_rtx[j]))
6744 if (find_regno_note (prev, REG_DEAD,
6745 REGNO (reload_reg_rtx[j])))
6746 remove_death (REGNO (reload_reg_rtx[j]), prev);
6747 break;
6752 /* We might have used find_equiv_reg above to choose an alternate
6753 place from which to reload. If so, and it died, we need to remove
6754 that death and move it to one of the insns we just made. */
6756 if (oldequiv_reg != 0
6757 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6759 rtx prev, prev1;
6761 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6762 prev = PREV_INSN (prev))
6763 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6764 && dead_or_set_p (prev, oldequiv_reg))
6766 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6768 for (prev1 = this_reload_insn;
6769 prev1; prev1 = PREV_INSN (prev1))
6770 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6771 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6772 PATTERN (prev1)))
6774 REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
6775 oldequiv_reg,
6776 REG_NOTES (prev1));
6777 break;
6779 remove_death (REGNO (oldequiv_reg), prev);
6781 break;
6784 #endif
6786 /* If we are reloading a register that was recently stored into by an
6787 output reload, see if we can prove that there was
6788 actually no need to store the old value in it. */
6790 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6791 && reload_in[j] != 0
6792 && GET_CODE (reload_in[j]) == REG
6793 #if 0
6794 /* There doesn't seem to be any reason to restrict this to pseudos
6795 and doing so loses in the case where we are copying from a
6796 register of the wrong class. */
6797 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6798 #endif
6799 && spill_reg_store[reload_spill_index[j]] != 0
6800 /* This is unsafe if some other reload uses the same reg first. */
6801 && reload_reg_free_before_p (reload_spill_index[j],
6802 reload_opnum[j], reload_when_needed[j])
6803 && dead_or_set_p (insn, reload_in[j])
6804 /* This is unsafe if the operand occurs more than once in the
6805 current insn. Perhaps some occurrences weren't reloaded. */
6806 && (count_occurrences (PATTERN (insn), reload_in[j])
6807 == expect_occurrences))
6808 delete_output_reload (insn, j,
6809 spill_reg_store[reload_spill_index[j]]);
6811 /* Input-reloading is done. Now do output-reloading,
6812 storing the value from the reload-register after the main insn
6813 if reload_out[j] is nonzero.
6815 ??? At some point we need to support handling output reloads of
6816 JUMP_INSNs or insns that set cc0. */
6817 old = reload_out[j];
6818 if (old != 0
6819 && reload_reg_rtx[j] != old
6820 && reload_reg_rtx[j] != 0)
6822 register rtx reloadreg = reload_reg_rtx[j];
6823 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6824 register rtx second_reloadreg = 0;
6825 #endif
6826 rtx note, p;
6827 enum machine_mode mode;
6828 int special = 0;
6830 /* An output operand that dies right away does need a reload,
6831 but the value need not be copied back into it from the reload
6832 register. Show the new location in the REG_UNUSED note. */
6833 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6834 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6836 XEXP (note, 0) = reload_reg_rtx[j];
6837 continue;
6839 /* Likewise for a SUBREG of an operand that dies. */
6840 else if (GET_CODE (old) == SUBREG
6841 && GET_CODE (SUBREG_REG (old)) == REG
6842 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6843 SUBREG_REG (old))))
6845 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6846 reload_reg_rtx[j]);
6847 continue;
6849 else if (GET_CODE (old) == SCRATCH)
6850 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6851 but we don't want to make an output reload. */
6852 continue;
6854 #if 0
6855 /* Strip off of OLD any size-increasing SUBREGs such as
6856 (SUBREG:SI foo:QI 0). */
6858 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6859 && (GET_MODE_SIZE (GET_MODE (old))
6860 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6861 old = SUBREG_REG (old);
6862 #endif
6864 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6865 if (GET_CODE (insn) == JUMP_INSN)
6866 abort ();
6868 if (reload_when_needed[j] == RELOAD_OTHER)
6869 start_sequence ();
6870 else
6871 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6873 /* Determine the mode to reload in.
6874 See comments above (for input reloading). */
6876 mode = GET_MODE (old);
6877 if (mode == VOIDmode)
6879 /* VOIDmode should never happen for an output. */
6880 if (asm_noperands (PATTERN (insn)) < 0)
6881 /* It's the compiler's fault. */
6882 fatal_insn ("VOIDmode on an output", insn);
6883 error_for_asm (insn, "output operand is constant in `asm'");
6884 /* Prevent crash--use something we know is valid. */
6885 mode = word_mode;
6886 old = gen_rtx_REG (mode, REGNO (reloadreg));
6889 if (GET_MODE (reloadreg) != mode)
6890 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6892 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6894 /* If we need two reload regs, set RELOADREG to the intermediate
6895 one, since it will be stored into OLD. We might need a secondary
6896 register only for an input reload, so check again here. */
6898 if (reload_secondary_out_reload[j] >= 0)
6900 rtx real_old = old;
6902 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6903 && reg_equiv_mem[REGNO (old)] != 0)
6904 real_old = reg_equiv_mem[REGNO (old)];
6906 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6907 mode, real_old)
6908 != NO_REGS))
6910 second_reloadreg = reloadreg;
6911 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6913 /* See if RELOADREG is to be used as a scratch register
6914 or as an intermediate register. */
6915 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6917 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6918 (real_old, second_reloadreg, reloadreg)));
6919 special = 1;
6921 else
6923 /* See if we need both a scratch and intermediate reload
6924 register. */
6926 int secondary_reload = reload_secondary_out_reload[j];
6927 enum insn_code tertiary_icode
6928 = reload_secondary_out_icode[secondary_reload];
6930 if (GET_MODE (reloadreg) != mode)
6931 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6933 if (tertiary_icode != CODE_FOR_nothing)
6935 rtx third_reloadreg
6936 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6937 rtx tem;
6939 /* Copy the primary reload reg to the secondary reload reg
6940 (note that these have been swapped above), then copy the
6941 secondary reload reg to OLD using our insn. */
6943 /* If REAL_OLD is a paradoxical SUBREG, remove it
6944 and try to put the opposite SUBREG on
6945 RELOADREG. */
6946 if (GET_CODE (real_old) == SUBREG
6947 && (GET_MODE_SIZE (GET_MODE (real_old))
6948 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6949 && 0 != (tem = gen_lowpart_common
6950 (GET_MODE (SUBREG_REG (real_old)),
6951 reloadreg)))
6952 real_old = SUBREG_REG (real_old), reloadreg = tem;
6954 gen_reload (reloadreg, second_reloadreg,
6955 reload_opnum[j], reload_when_needed[j]);
6956 emit_insn ((GEN_FCN (tertiary_icode)
6957 (real_old, reloadreg, third_reloadreg)));
6958 special = 1;
6961 else
6962 /* Copy between the reload regs here and then to
6963 OUT later. */
6965 gen_reload (reloadreg, second_reloadreg,
6966 reload_opnum[j], reload_when_needed[j]);
6970 #endif
6972 /* Output the last reload insn. */
6973 if (! special)
6975 rtx set;
6977 /* Don't output the last reload if OLD is not the destination of
6978 INSN but appears in its source and is clobbered by INSN. */
6979 if (! flag_expensive_optimizations
6980 || GET_CODE (old) != REG
6981 || !(set = single_set (insn))
6982 || rtx_equal_p (old, SET_DEST (set))
6983 || !reg_mentioned_p (old, SET_SRC (set))
6984 || !regno_clobbered_p (REGNO (old), insn))
6985 gen_reload (old, reloadreg, reload_opnum[j],
6986 reload_when_needed[j]);
6989 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6990 /* If final will look at death notes for this reg,
6991 put one on the last output-reload insn to use it. Similarly
6992 for any secondary register. */
6993 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6994 for (p = get_last_insn (); p; p = PREV_INSN (p))
6995 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6996 && reg_overlap_mentioned_for_reload_p (reloadreg,
6997 PATTERN (p)))
6998 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
6999 reloadreg, REG_NOTES (p));
7001 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7002 if (! special && second_reloadreg
7003 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
7004 for (p = get_last_insn (); p; p = PREV_INSN (p))
7005 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7006 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
7007 PATTERN (p)))
7008 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
7009 second_reloadreg,
7010 REG_NOTES (p));
7011 #endif
7012 #endif
7013 /* Look at all insns we emitted, just to be safe. */
7014 for (p = get_insns (); p; p = NEXT_INSN (p))
7015 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7017 rtx pat = PATTERN (p);
7019 /* If this output reload doesn't come from a spill reg,
7020 clear any memory of reloaded copies of the pseudo reg.
7021 If this output reload comes from a spill reg,
7022 reg_has_output_reload will make this do nothing. */
7023 note_stores (pat, forget_old_reloads_1);
7025 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7027 if (reload_spill_index[j] < 0
7028 && GET_CODE (pat) == SET
7029 && SET_SRC (pat) == reload_reg_rtx[j])
7031 int src = REGNO (SET_SRC (pat));
7033 reload_spill_index[j] = src;
7034 SET_HARD_REG_BIT (reg_is_output_reload, src);
7035 if (find_regno_note (insn, REG_DEAD, src))
7036 SET_HARD_REG_BIT (reg_reloaded_died, src);
7038 if (reload_spill_index[j] >= 0)
7039 new_spill_reg_store[reload_spill_index[j]] = p;
7043 if (reload_when_needed[j] == RELOAD_OTHER)
7045 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7046 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7048 else
7049 output_reload_insns[reload_opnum[j]] = get_insns ();
7051 end_sequence ();
7055 /* Now write all the insns we made for reloads in the order expected by
7056 the allocation functions. Prior to the insn being reloaded, we write
7057 the following reloads:
7059 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7061 RELOAD_OTHER reloads.
7063 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7064 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7065 RELOAD_FOR_INPUT reload for the operand.
7067 RELOAD_FOR_OPADDR_ADDRS reloads.
7069 RELOAD_FOR_OPERAND_ADDRESS reloads.
7071 After the insn being reloaded, we write the following:
7073 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7074 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7075 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7076 reloads for the operand. The RELOAD_OTHER output reloads are
7077 output in descending order by reload number. */
7079 emit_insns_before (other_input_address_reload_insns, before_insn);
7080 emit_insns_before (other_input_reload_insns, before_insn);
7082 for (j = 0; j < reload_n_operands; j++)
7084 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
7085 emit_insns_before (input_address_reload_insns[j], before_insn);
7086 emit_insns_before (input_reload_insns[j], before_insn);
7089 emit_insns_before (other_operand_reload_insns, before_insn);
7090 emit_insns_before (operand_reload_insns, before_insn);
7092 for (j = 0; j < reload_n_operands; j++)
7094 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7095 emit_insns_before (output_address_reload_insns[j], following_insn);
7096 emit_insns_before (output_reload_insns[j], following_insn);
7097 emit_insns_before (other_output_reload_insns[j], following_insn);
7100 /* Move death notes from INSN
7101 to output-operand-address and output reload insns. */
7102 #ifdef PRESERVE_DEATH_INFO_REGNO_P
7104 rtx insn1;
7105 /* Loop over those insns, last ones first. */
7106 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
7107 insn1 = PREV_INSN (insn1))
7108 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
7110 rtx source = SET_SRC (PATTERN (insn1));
7111 rtx dest = SET_DEST (PATTERN (insn1));
7113 /* The note we will examine next. */
7114 rtx reg_notes = REG_NOTES (insn);
7115 /* The place that pointed to this note. */
7116 rtx *prev_reg_note = &REG_NOTES (insn);
7118 /* If the note is for something used in the source of this
7119 reload insn, or in the output address, move the note. */
7120 while (reg_notes)
7122 rtx next_reg_notes = XEXP (reg_notes, 1);
7123 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
7124 && GET_CODE (XEXP (reg_notes, 0)) == REG
7125 && ((GET_CODE (dest) != REG
7126 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7127 dest))
7128 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7129 source)))
7131 *prev_reg_note = next_reg_notes;
7132 XEXP (reg_notes, 1) = REG_NOTES (insn1);
7133 REG_NOTES (insn1) = reg_notes;
7135 else
7136 prev_reg_note = &XEXP (reg_notes, 1);
7138 reg_notes = next_reg_notes;
7142 #endif
7144 /* For all the spill regs newly reloaded in this instruction,
7145 record what they were reloaded from, so subsequent instructions
7146 can inherit the reloads.
7148 Update spill_reg_store for the reloads of this insn.
7149 Copy the elements that were updated in the loop above. */
7151 for (j = 0; j < n_reloads; j++)
7153 register int r = reload_order[j];
7154 register int i = reload_spill_index[r];
7156 /* I is nonneg if this reload used a register.
7157 If reload_reg_rtx[r] is 0, this is an optional reload
7158 that we opted to ignore. */
7160 if (i >= 0 && reload_reg_rtx[r] != 0)
7162 int nr
7163 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
7164 int k;
7165 int part_reaches_end = 0;
7166 int all_reaches_end = 1;
7168 /* For a multi register reload, we need to check if all or part
7169 of the value lives to the end. */
7170 for (k = 0; k < nr; k++)
7172 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
7173 reload_when_needed[r]))
7174 part_reaches_end = 1;
7175 else
7176 all_reaches_end = 0;
7179 /* Ignore reloads that don't reach the end of the insn in
7180 their entirety. */
7181 if (all_reaches_end)
7183 /* First, clear out memory of what used to be in this spill reg.
7184 If consecutive registers are used, clear them all. */
7186 for (k = 0; k < nr; k++)
7187 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7189 /* Maybe the spill reg contains a copy of reload_out. */
7190 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7192 register int nregno = REGNO (reload_out[r]);
7193 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7194 : HARD_REGNO_NREGS (nregno,
7195 GET_MODE (reload_reg_rtx[r])));
7197 spill_reg_store[i] = new_spill_reg_store[i];
7198 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7200 /* If NREGNO is a hard register, it may occupy more than
7201 one register. If it does, say what is in the
7202 rest of the registers assuming that both registers
7203 agree on how many words the object takes. If not,
7204 invalidate the subsequent registers. */
7206 if (nregno < FIRST_PSEUDO_REGISTER)
7207 for (k = 1; k < nnr; k++)
7208 reg_last_reload_reg[nregno + k]
7209 = (nr == nnr
7210 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7211 REGNO (reload_reg_rtx[r]) + k)
7212 : 0);
7214 /* Now do the inverse operation. */
7215 for (k = 0; k < nr; k++)
7217 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7218 reg_reloaded_contents[i + k]
7219 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7220 ? nregno
7221 : nregno + k);
7222 reg_reloaded_insn[i + k] = insn;
7223 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7227 /* Maybe the spill reg contains a copy of reload_in. Only do
7228 something if there will not be an output reload for
7229 the register being reloaded. */
7230 else if (reload_out[r] == 0
7231 && reload_in[r] != 0
7232 && spill_reg_order[i] >= 0
7233 && ((GET_CODE (reload_in[r]) == REG
7234 && ! reg_has_output_reload[REGNO (reload_in[r])])
7235 || (GET_CODE (reload_in_reg[r]) == REG
7236 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
7238 register int nregno;
7239 int nnr;
7241 if (GET_CODE (reload_in[r]) == REG)
7242 nregno = REGNO (reload_in[r]);
7243 else
7244 nregno = REGNO (reload_in_reg[r]);
7246 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7247 : HARD_REGNO_NREGS (nregno,
7248 GET_MODE (reload_reg_rtx[r])));
7250 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7252 if (nregno < FIRST_PSEUDO_REGISTER)
7253 for (k = 1; k < nnr; k++)
7254 reg_last_reload_reg[nregno + k]
7255 = (nr == nnr
7256 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7257 REGNO (reload_reg_rtx[r]) + k)
7258 : 0);
7260 /* Unless we inherited this reload, show we haven't
7261 recently done a store. */
7262 if (! reload_inherited[r])
7263 spill_reg_store[i] = 0;
7265 for (k = 0; k < nr; k++)
7267 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7268 reg_reloaded_contents[i + k]
7269 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7270 ? nregno
7271 : nregno + k);
7272 reg_reloaded_insn[i + k] = insn;
7273 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7278 /* However, if part of the reload reaches the end, then we must
7279 invalidate the old info for the part that survives to the end. */
7280 else if (part_reaches_end)
7282 for (k = 0; k < nr; k++)
7283 if (reload_reg_reaches_end_p (i + k,
7284 reload_opnum[r],
7285 reload_when_needed[r]))
7286 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7290 /* The following if-statement was #if 0'd in 1.34 (or before...).
7291 It's reenabled in 1.35 because supposedly nothing else
7292 deals with this problem. */
7294 /* If a register gets output-reloaded from a non-spill register,
7295 that invalidates any previous reloaded copy of it.
7296 But forget_old_reloads_1 won't get to see it, because
7297 it thinks only about the original insn. So invalidate it here. */
7298 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7300 register int nregno = REGNO (reload_out[r]);
7301 if (nregno >= FIRST_PSEUDO_REGISTER)
7302 reg_last_reload_reg[nregno] = 0;
7303 else
7305 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7307 while (num_regs-- > 0)
7308 reg_last_reload_reg[nregno + num_regs] = 0;
7312 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7315 /* Emit code to perform a reload from IN (which may be a reload register) to
7316 OUT (which may also be a reload register). IN or OUT is from operand
7317 OPNUM with reload type TYPE.
7319 Returns first insn emitted. */
7322 gen_reload (out, in, opnum, type)
7323 rtx out;
7324 rtx in;
7325 int opnum;
7326 enum reload_type type;
7328 rtx last = get_last_insn ();
7329 rtx tem;
7331 /* If IN is a paradoxical SUBREG, remove it and try to put the
7332 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7333 if (GET_CODE (in) == SUBREG
7334 && (GET_MODE_SIZE (GET_MODE (in))
7335 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7336 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7337 in = SUBREG_REG (in), out = tem;
7338 else if (GET_CODE (out) == SUBREG
7339 && (GET_MODE_SIZE (GET_MODE (out))
7340 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7341 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7342 out = SUBREG_REG (out), in = tem;
7344 /* How to do this reload can get quite tricky. Normally, we are being
7345 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7346 register that didn't get a hard register. In that case we can just
7347 call emit_move_insn.
7349 We can also be asked to reload a PLUS that adds a register or a MEM to
7350 another register, constant or MEM. This can occur during frame pointer
7351 elimination and while reloading addresses. This case is handled by
7352 trying to emit a single insn to perform the add. If it is not valid,
7353 we use a two insn sequence.
7355 Finally, we could be called to handle an 'o' constraint by putting
7356 an address into a register. In that case, we first try to do this
7357 with a named pattern of "reload_load_address". If no such pattern
7358 exists, we just emit a SET insn and hope for the best (it will normally
7359 be valid on machines that use 'o').
7361 This entire process is made complex both because reload will never
7362 process the insns we generate here (so we must ensure that they
7363 fit their constraints) and because parts of IN might be being
7364 reloaded separately and replaced with spill registers.
7365 Because of this, we are, in some sense, just guessing the right approach
7366 here. The one listed above seems to work.
7368 ??? At some point, this whole thing needs to be rethought. */
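/* For example (with illustrative operands), when asked to reload
   (plus:SI (reg:SI 14 fp) (const_int 400)) into reload register
   (reg:SI 3), the code below first emits
       (set (reg:SI 3) (plus:SI (reg:SI 14 fp) (const_int 400)))
   and keeps it if recog and constrain_operands accept it.  Otherwise it
   deletes that insn and uses the two-insn sequence: move the constant
   into reg 3 and add the frame pointer to it, or, if that add is not
   recognized either, copy the frame pointer into reg 3 and then add
   the constant.  */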
7370 if (GET_CODE (in) == PLUS
7371 && (GET_CODE (XEXP (in, 0)) == REG
7372 || GET_CODE (XEXP (in, 0)) == SUBREG
7373 || GET_CODE (XEXP (in, 0)) == MEM)
7374 && (GET_CODE (XEXP (in, 1)) == REG
7375 || GET_CODE (XEXP (in, 1)) == SUBREG
7376 || CONSTANT_P (XEXP (in, 1))
7377 || GET_CODE (XEXP (in, 1)) == MEM))
7379 /* We need to compute the sum of a register or a MEM and another
7380 register, constant, or MEM, and put it into the reload
7381 register. The best possible way of doing this is if the machine
7382 has a three-operand ADD insn that accepts the required operands.
7384 The simplest approach is to try to generate such an insn and see if it
7385 is recognized and matches its constraints. If so, it can be used.
7387 It might be better not to actually emit the insn unless it is valid,
7388 but we need to pass the insn as an operand to `recog' and
7389 `insn_extract' and it is simpler to emit and then delete the insn if
7390 not valid than to dummy things up. */
7392 rtx op0, op1, tem, insn;
7393 int code;
7395 op0 = find_replacement (&XEXP (in, 0));
7396 op1 = find_replacement (&XEXP (in, 1));
7398 /* Since constraint checking is strict, commutativity won't be
7399 checked, so we need to do that here to avoid spurious failure
7400 if the add instruction is two-address and the second operand
7401 of the add is the same as the reload reg, which is frequently
7402 the case. If the insn would be A = B + A, rearrange it so
7403 it will be A = A + B as constrain_operands expects. */
7405 if (GET_CODE (XEXP (in, 1)) == REG
7406 && REGNO (out) == REGNO (XEXP (in, 1)))
7407 tem = op0, op0 = op1, op1 = tem;
7409 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7410 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7412 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7413 code = recog_memoized (insn);
7415 if (code >= 0)
7417 insn_extract (insn);
7418 /* We want constrain operands to treat this insn strictly in
7419 its validity determination, i.e., the way it would after reload
7420 has completed. */
7421 if (constrain_operands (code, 1))
7422 return insn;
7425 delete_insns_since (last);
7427 /* If that failed, we must use a conservative two-insn sequence.
7428 Use a move to copy the constant, MEM, or pseudo register to the
7429 reload register, since "move" can handle an arbitrary operand,
7430 unlike add, which in general can't. Then add the registers.
7432 If there is another way to do this for a specific machine, a
7433 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7434 we emit below. */
7436 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7437 || (GET_CODE (op1) == REG
7438 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7439 tem = op0, op0 = op1, op1 = tem;
7441 gen_reload (out, op0, opnum, type);
7443 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7444 This fixes a problem on the 32K where the stack pointer cannot
7445 be used as an operand of an add insn. */
7447 if (rtx_equal_p (op0, op1))
7448 op1 = out;
7450 insn = emit_insn (gen_add2_insn (out, op1));
7452 /* If that failed, copy the address register to the reload register.
7453 Then add the constant to the reload register. */
7455 code = recog_memoized (insn);
7457 if (code >= 0)
7459 insn_extract (insn);
7460 /* We want constrain operands to treat this insn strictly in
7461 its validity determination, i.e., the way it would after reload
7462 has completed. */
7463 if (constrain_operands (code, 1))
7465 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7466 REG_NOTES (insn)
7467 = gen_rtx (EXPR_LIST, REG_EQUIV, in, REG_NOTES (insn));
7468 return insn;
7472 delete_insns_since (last);
7474 gen_reload (out, op1, opnum, type);
7475 insn = emit_insn (gen_add2_insn (out, op0));
7476 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUIV, in, REG_NOTES (insn));
7479 #ifdef SECONDARY_MEMORY_NEEDED
7480 /* If we need a memory location to do the move, do it that way. */
7481 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7482 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7483 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7484 REGNO_REG_CLASS (REGNO (out)),
7485 GET_MODE (out)))
7487 /* Get the memory to use and rewrite both registers to its mode. */
7488 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7490 if (GET_MODE (loc) != GET_MODE (out))
7491 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7493 if (GET_MODE (loc) != GET_MODE (in))
7494 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7496 gen_reload (loc, in, opnum, type);
7497 gen_reload (out, loc, opnum, type);
7499 #endif
7501 /* If IN is a simple operand, use gen_move_insn. */
7502 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7503 emit_insn (gen_move_insn (out, in));
7505 #ifdef HAVE_reload_load_address
7506 else if (HAVE_reload_load_address)
7507 emit_insn (gen_reload_load_address (out, in));
7508 #endif
7510 /* Otherwise, just write (set OUT IN) and hope for the best. */
7511 else
7512 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7514 /* Return the first insn emitted.
7515 We can not just return get_last_insn, because there may have
7516 been multiple instructions emitted. Also note that gen_move_insn may
7517 emit more than one insn itself, so we can not assume that there is one
7518 insn emitted per emit_insn_before call. */
7520 return last ? NEXT_INSN (last) : get_insns ();
7523 /* Delete a previously made output-reload
7524 whose result we now believe is not needed.
7525 First we double-check.
7527 INSN is the insn now being processed.
7528 OUTPUT_RELOAD_INSN is the insn of the output reload.
7529 J is the reload-number for this insn. */
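/* For example (illustrative), suppose an earlier output reload stored
   the value of pseudo 100 from a spill reg into 100's stack slot.  If
   pseudo 100 is not referenced between that store and INSN, no label or
   jump intervenes, and 100 dies or is set in INSN, then INSN can get
   the value through the reload reg alone, and the store may be
   deleted.  */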
7531 static void
7532 delete_output_reload (insn, j, output_reload_insn)
7533 rtx insn;
7534 int j;
7535 rtx output_reload_insn;
7537 register rtx i1;
7539 /* Get the raw pseudo-register referred to. */
7541 rtx reg = reload_in[j];
7542 while (GET_CODE (reg) == SUBREG)
7543 reg = SUBREG_REG (reg);
7545 /* If the pseudo-reg we are reloading is no longer referenced
7546 anywhere between the store into it and here,
7547 and no jumps or labels intervene, then the value can get
7548 here through the reload reg alone.
7549 Otherwise, give up--return. */
7550 for (i1 = NEXT_INSN (output_reload_insn);
7551 i1 != insn; i1 = NEXT_INSN (i1))
7553 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7554 return;
7555 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7556 && reg_mentioned_p (reg, PATTERN (i1)))
7558 /* If this is just a single USE with an REG_EQUAL note in front
7559 of INSN, this is no problem, because this mentions just the
7560 address that we are using here.
7561 But if there is more than one such USE, the insn might use
7562 the operand directly, or another reload might do that.
7563 This is analogous to the count_occurrences check in the callers. */
7564 int num_occurrences = 0;
7566 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE
7567 && find_reg_note (i1, REG_EQUAL, NULL_RTX))
7569 num_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7570 i1 = NEXT_INSN (i1);
7572 if (num_occurrences == 1 && i1 == insn)
7573 break;
7574 return;
7578 /* The caller has already checked that REG dies or is set in INSN.
7579 It has also checked that we are optimizing, and thus some inaccuracies
7580 in the debugging information are acceptable.
7581 So we could just delete output_reload_insn.
7582 But in some cases we can improve the debugging information without
7583 sacrificing optimization - maybe even improving the code:
7584 See if the pseudo reg has been completely replaced
7585 with reload regs. If so, delete the store insn
7586 and forget we had a stack slot for the pseudo. */
7587 if (reload_out[j] != reload_in[j]
7588 && REG_N_DEATHS (REGNO (reg)) == 1
7589 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7590 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7592 rtx i2;
7594 /* We know that it was used only between here
7595 and the beginning of the current basic block.
7596 (We also know that the last use before INSN was
7597 the output reload we are thinking of deleting, but never mind that.)
7598 Search that range; see if any ref remains. */
7599 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7601 rtx set = single_set (i2);
7603 /* Uses which just store in the pseudo don't count,
7604 since if they are the only uses, they are dead. */
7605 if (set != 0 && SET_DEST (set) == reg)
7606 continue;
7607 if (GET_CODE (i2) == CODE_LABEL
7608 || GET_CODE (i2) == JUMP_INSN)
7609 break;
7610 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7611 && reg_mentioned_p (reg, PATTERN (i2)))
7613 /* Some other ref remains; just delete the output reload we
7614 know to be dead. */
7615 delete_insn (output_reload_insn);
7616 return;
7620 /* Delete the now-dead stores into this pseudo. */
7621 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7623 rtx set = single_set (i2);
7625 if (set != 0 && SET_DEST (set) == reg)
7627 /* This might be a basic block head,
7628 thus don't use delete_insn. */
7629 PUT_CODE (i2, NOTE);
7630 NOTE_SOURCE_FILE (i2) = 0;
7631 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7633 if (GET_CODE (i2) == CODE_LABEL
7634 || GET_CODE (i2) == JUMP_INSN)
7635 break;
7638 /* For the debugging info,
7639 say the pseudo lives in this reload reg. */
7640 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7641 alter_reg (REGNO (reg), -1);
7643 delete_insn (output_reload_insn);
7647 /* Output reload-insns to reload VALUE into RELOADREG.
7648 VALUE is an autoincrement or autodecrement RTX whose operand
7649 is a register or memory location;
7650 so reloading involves incrementing that location.
7652 INC_AMOUNT is the number to increment or decrement by (always positive).
7653 This cannot be deduced from VALUE. */
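/* For example (illustrative), reloading (post_inc:SI (reg:SI 6)) with
   INC_AMOUNT 4: RELOADREG first gets a copy of reg 6 to use as the
   address; reg 6 is then incremented by 4, either directly or, if that
   is not possible, by incrementing the copy in RELOADREG, storing it
   back into reg 6, and finally subtracting 4 from RELOADREG so that it
   still holds the original address.  */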
7655 static void
7656 inc_for_reload (reloadreg, value, inc_amount)
7657 rtx reloadreg;
7658 rtx value;
7659 int inc_amount;
7661 /* REG or MEM to be copied and incremented. */
7662 rtx incloc = XEXP (value, 0);
7663 /* Nonzero if increment after copying. */
7664 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7665 rtx last;
7666 rtx inc;
7667 rtx add_insn;
7668 int code;
7670 /* No hard register is equivalent to this register after
7671 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7672 we could inc/dec that register as well (maybe even using it for
7673 the source), but I'm not sure it's worth worrying about. */
7674 if (GET_CODE (incloc) == REG)
7675 reg_last_reload_reg[REGNO (incloc)] = 0;
7677 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7678 inc_amount = - inc_amount;
7680 inc = GEN_INT (inc_amount);
7682 /* If this is post-increment, first copy the location to the reload reg. */
7683 if (post)
7684 emit_insn (gen_move_insn (reloadreg, incloc));
7686 /* See if we can directly increment INCLOC. Use a method similar to that
7687 in gen_reload. */
7689 last = get_last_insn ();
7690 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7691 gen_rtx_PLUS (GET_MODE (incloc),
7692 incloc, inc)));
7694 code = recog_memoized (add_insn);
7695 if (code >= 0)
7697 insn_extract (add_insn);
7698 if (constrain_operands (code, 1))
7700 /* If this is a pre-increment and we have incremented the value
7701 where it lives, copy the incremented value to RELOADREG to
7702 be used as an address. */
7704 if (! post)
7705 emit_insn (gen_move_insn (reloadreg, incloc));
7707 return;
7711 delete_insns_since (last);
7713 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7714 The way we do this depends on whether this is pre- or post-increment.
7715 For pre-increment, copy INCLOC to the reload register, increment it
7716 there, then save back. */
7718 if (! post)
7720 emit_insn (gen_move_insn (reloadreg, incloc));
7721 emit_insn (gen_add2_insn (reloadreg, inc));
7722 emit_insn (gen_move_insn (incloc, reloadreg));
7724 else
7726 /* Postincrement.
7727 Because this might be a jump insn or a compare, and because RELOADREG
7728 may not be available after the insn in an input reload, we must do
7729 the incrementation before the insn being reloaded for.
7731 We have already copied INCLOC to RELOADREG. Increment the copy in
7732 RELOADREG, save that back, then decrement RELOADREG so it has
7733 the original value. */
7735 emit_insn (gen_add2_insn (reloadreg, inc));
7736 emit_insn (gen_move_insn (incloc, reloadreg));
7737 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7740 return;
7743 /* Return 1 if we are certain that the constraint-string STRING allows
7744 the hard register REG. Return 0 if we can't be sure of this. */
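/* For example, given "r,r" this returns 1 for any general hard
   register, while "r,m" yields 0, since the second alternative does
   not guarantee that the register is acceptable.  */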
7746 static int
7747 constraint_accepts_reg_p (string, reg)
7748 char *string;
7749 rtx reg;
7751 int value = 0;
7752 int regno = true_regnum (reg);
7753 int c;
7755 /* Initialize for first alternative. */
7756 value = 0;
7757 /* Check that each alternative contains `g' or `r'. */
7758 while (1)
7759 switch (c = *string++)
7761 case 0:
7762 /* If an alternative lacks `g' or `r', we lose. */
7763 return value;
7764 case ',':
7765 /* If an alternative lacks `g' or `r', we lose. */
7766 if (value == 0)
7767 return 0;
7768 /* Initialize for next alternative. */
7769 value = 0;
7770 break;
7771 case 'g':
7772 case 'r':
7773 /* Any general reg wins for this alternative. */
7774 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7775 value = 1;
7776 break;
7777 default:
7778 /* Any reg in specified class wins for this alternative. */
7780 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7782 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7783 value = 1;
7788 /* Return the number of places FIND appears within X, but don't count
7789 an occurrence if some SET_DEST is FIND. */
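/* For example, in (set (reg:SI 3) (plus:SI (reg:SI 4) (reg:SI 4))),
   searching for the (shared) rtx of reg 4 yields 2, while searching
   for reg 3 yields 0, because the SET_DEST occurrence is not
   counted.  */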
7792 count_occurrences (x, find)
7793 register rtx x, find;
7795 register int i, j;
7796 register enum rtx_code code;
7797 register char *format_ptr;
7798 int count;
7800 if (x == find)
7801 return 1;
7802 if (x == 0)
7803 return 0;
7805 code = GET_CODE (x);
7807 switch (code)
7809 case REG:
7810 case QUEUED:
7811 case CONST_INT:
7812 case CONST_DOUBLE:
7813 case SYMBOL_REF:
7814 case CODE_LABEL:
7815 case PC:
7816 case CC0:
7817 return 0;
7819 case SET:
7820 if (SET_DEST (x) == find)
7821 return count_occurrences (SET_SRC (x), find);
7822 break;
7824 default:
7825 break;
7828 format_ptr = GET_RTX_FORMAT (code);
7829 count = 0;
7831 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7833 switch (*format_ptr++)
7835 case 'e':
7836 count += count_occurrences (XEXP (x, i), find);
7837 break;
7839 case 'E':
7840 if (XVEC (x, i) != NULL)
7842 for (j = 0; j < XVECLEN (x, i); j++)
7843 count += count_occurrences (XVECEXP (x, i, j), find);
7845 break;
7848 return count;
7851 /* This array holds values which are equivalent to a hard register
7852 during reload_cse_regs. Each array element is an EXPR_LIST of
7853 values. Each time a hard register is set, we set the corresponding
7854 array element to the value. Each time a hard register is copied
7855 into memory, we add the memory location to the corresponding array
7856 element. We don't store values or memory addresses with side
7857 effects in this array.
7859 If the value is a CONST_INT, then the mode of the containing
7860 EXPR_LIST is the mode in which that CONST_INT was referenced.
7862 We sometimes clobber a specific entry in a list. In that case, we
7863 just set XEXP (list-entry, 0) to 0. */
7865 static rtx *reg_values;
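/* A minimal sketch of how an entry gets recorded (hypothetical helper;
   the real recording is done by reload_cse_record_set, which also
   screens out values with side effects and handles stores to memory
   and multi-word registers):  */
#if 0
static void
reload_cse_record_one (regno, val, mode)
     int regno;
     rtx val;
     enum machine_mode mode;
{
  /* The mode of the EXPR_LIST node records the mode in which VAL
     (e.g. a CONST_INT) was referenced.  */
  reg_values[regno] = gen_rtx_EXPR_LIST (mode, val, NULL_RTX);
}
#endif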
7867 /* This is a preallocated REG rtx which we use as a temporary in
7868 reload_cse_invalidate_regno, so that we don't need to allocate a
7869 new one each time through a loop in that function. */
7871 static rtx invalidate_regno_rtx;
7873 /* This is a set of registers for which we must remove REG_DEAD notes in
7874 previous insns, because our modifications made them invalid. That can
7875 happen if we introduced the register into the current insn, or we deleted
7876 the current insn which used to set the register. */
7878 static HARD_REG_SET no_longer_dead_regs;
7880 /* Invalidate any entries in reg_values which depend on REGNO,
7881 including those for REGNO itself. This is called if REGNO is
7882 changing. If CLOBBER is true, then always forget anything we
7883 currently know about REGNO. MODE is the mode of the assignment to
7884 REGNO, which is used to determine how many hard registers are being
7885 changed. If MODE is VOIDmode, then only REGNO is being changed;
7886 this is used when invalidating call clobbered registers across a
7887 call. */
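/* For example (illustrative), if reg_values[2] records a DImode value
   that occupies hard registers 2 and 3, an assignment to register 3
   alone must also discard the entry for register 2, since part of the
   recorded value has changed.  */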
7889 static void
7890 reload_cse_invalidate_regno (regno, mode, clobber)
7891 int regno;
7892 enum machine_mode mode;
7893 int clobber;
7895 int endregno;
7896 register int i;
7898 /* Our callers don't always go through true_regnum; we may see a
7899 pseudo-register here from a CLOBBER or the like. We probably
7900 won't ever see a pseudo-register that has a real register number,
7901 but we check anyhow for safety. */
7902 if (regno >= FIRST_PSEUDO_REGISTER)
7903 regno = reg_renumber[regno];
7904 if (regno < 0)
7905 return;
7907 if (mode == VOIDmode)
7908 endregno = regno + 1;
7909 else
7910 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7912 if (clobber)
7913 for (i = regno; i < endregno; i++)
7914 reg_values[i] = 0;
7916 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7918 rtx x;
7920 for (x = reg_values[i]; x; x = XEXP (x, 1))
7922 if (XEXP (x, 0) != 0
7923 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7925 /* If this is the only entry on the list, clear
7926 reg_values[i]. Otherwise, just clear this entry on
7927 the list. */
7928 if (XEXP (x, 1) == 0 && x == reg_values[i])
7930 reg_values[i] = 0;
7931 break;
7933 XEXP (x, 0) = 0;
7938 /* We must look at earlier registers, in case REGNO is part of a
7939 multi word value but is not the first register. If an earlier
7940 register has a value in a mode which overlaps REGNO, then we must
7941 invalidate that earlier register. Note that we do not need to
7942 check REGNO or later registers (we must not check REGNO itself,
7943 because we would incorrectly conclude that there was a conflict). */
7945 for (i = 0; i < regno; i++)
7947 rtx x;
7949 for (x = reg_values[i]; x; x = XEXP (x, 1))
7951 if (XEXP (x, 0) != 0)
7953 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7954 REGNO (invalidate_regno_rtx) = i;
7955 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7956 NULL_PTR))
7958 reload_cse_invalidate_regno (i, VOIDmode, 1);
7959 break;
7966 /* The memory at address MEM_BASE is being changed.
7967 Return whether this change will invalidate VAL. */
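/* For example, a store through (mem:SI (reg:SI 6)) invalidates a
   recorded value containing any MEM that may overlap that location,
   and the (mem:BLK const0_rtx) base used for calls conflicts with
   every memory reference, while plain register and constant values
   are unaffected.  */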
7969 static int
7970 reload_cse_mem_conflict_p (mem_base, val)
7971 rtx mem_base;
7972 rtx val;
7974 enum rtx_code code;
7975 char *fmt;
7976 int i;
7978 code = GET_CODE (val);
7979 switch (code)
7981 /* Get rid of a few simple cases quickly. */
7982 case REG:
7983 case PC:
7984 case CC0:
7985 case SCRATCH:
7986 case CONST:
7987 case CONST_INT:
7988 case CONST_DOUBLE:
7989 case SYMBOL_REF:
7990 case LABEL_REF:
7991 return 0;
7993 case MEM:
7994 if (GET_MODE (mem_base) == BLKmode
7995 || GET_MODE (val) == BLKmode)
7996 return 1;
7997 if (anti_dependence (val, mem_base))
7998 return 1;
7999 /* The address may contain nested MEMs. */
8000 break;
8002 default:
8003 break;
8006 fmt = GET_RTX_FORMAT (code);
8008 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8010 if (fmt[i] == 'e')
8012 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
8013 return 1;
8015 else if (fmt[i] == 'E')
8017 int j;
8019 for (j = 0; j < XVECLEN (val, i); j++)
8020 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
8021 return 1;
8025 return 0;
8028 /* Invalidate any entries in reg_values which are changed because of a
8029 store to MEM_RTX. If this is called because of a non-const call
8030 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8032 static void
8033 reload_cse_invalidate_mem (mem_rtx)
8034 rtx mem_rtx;
8036 register int i;
8038 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8040 rtx x;
8042 for (x = reg_values[i]; x; x = XEXP (x, 1))
8044 if (XEXP (x, 0) != 0
8045 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
8047 /* If this is the only entry on the list, clear
8048 reg_values[i]. Otherwise, just clear this entry on
8049 the list. */
8050 if (XEXP (x, 1) == 0 && x == reg_values[i])
8052 reg_values[i] = 0;
8053 break;
8055 XEXP (x, 0) = 0;
8061 /* Invalidate DEST, which is being assigned to or clobbered. The
8062 second parameter exists so that this function can be passed to
8063 note_stores; it is ignored. */
8065 static void
8066 reload_cse_invalidate_rtx (dest, ignore)
8067 rtx dest;
8068 rtx ignore ATTRIBUTE_UNUSED;
8070 while (GET_CODE (dest) == STRICT_LOW_PART
8071 || GET_CODE (dest) == SIGN_EXTRACT
8072 || GET_CODE (dest) == ZERO_EXTRACT
8073 || GET_CODE (dest) == SUBREG)
8074 dest = XEXP (dest, 0);
8076 if (GET_CODE (dest) == REG)
8077 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8078 else if (GET_CODE (dest) == MEM)
8079 reload_cse_invalidate_mem (dest);
8082 /* Possibly delete death notes on the insns before INSN if modifying INSN
8083 extended the lifespan of the registers. */
8085 static void
8086 reload_cse_delete_death_notes (insn)
8087 rtx insn;
8089 int dreg;
8091 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
8093 rtx trial;
8095 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
8096 continue;
8098 for (trial = prev_nonnote_insn (insn);
8099 (trial
8100 && GET_CODE (trial) != CODE_LABEL
8101 && GET_CODE (trial) != BARRIER);
8102 trial = prev_nonnote_insn (trial))
8104 if (find_regno_note (trial, REG_DEAD, dreg))
8106 remove_death (dreg, trial);
8107 break;
8113 /* Record that the current insn uses hard reg REGNO in mode MODE. This
8114 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
8115 notes for this register. */
8117 static void
8118 reload_cse_no_longer_dead (regno, mode)
8119 int regno;
8120 enum machine_mode mode;
8122 int nregs = HARD_REGNO_NREGS (regno, mode);
8123 while (nregs-- > 0)
8125 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
8126 regno++;
8131 /* Do a very simple CSE pass over the hard registers.
8133 This function detects no-op moves where we happened to assign two
8134 different pseudo-registers to the same hard register, and then
8135 copied one to the other. Reload will generate a useless
8136 instruction copying a register to itself.
8138 This function also detects cases where we load a value from memory
8139 into two different registers, and (if memory is more expensive than
8140 registers) changes it to simply copy the first register into the
8141 second register.
8143 Another optimization is performed that scans the operands of each
8144 instruction to see whether the value is already available in a
8145 hard register. It then replaces the operand with the hard register
8146 if possible, much like an optional reload would. */
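/* For example (illustrative), if reload leaves behind

       (set (reg:SI 3) (mem:SI (reg:SI 6)))
       ...
       (set (reg:SI 4) (mem:SI (reg:SI 6)))

   and neither reg 3 nor that memory location changes in between, the
   second load can be turned into (set (reg:SI 4) (reg:SI 3)) when the
   register-register copy is cheaper than the load, and a move like
   (set (reg:SI 3) (reg:SI 3)) is deleted outright.  */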
8148 void
8149 reload_cse_regs (first)
8150 rtx first;
8152 char *firstobj;
8153 rtx callmem;
8154 register int i;
8155 rtx insn;
8157 init_alias_analysis ();
8159 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8161 reg_values[i] = 0;
8163 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8164 free them when we are done. */
8165 push_obstacks (&reload_obstack, &reload_obstack);
8166 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8168 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8169 memory for a non-const call instruction. */
8170 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
8172 /* This is used in reload_cse_invalidate_regno to avoid consing a
8173 new REG in a loop in that function. */
8174 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
8176 for (insn = first; insn; insn = NEXT_INSN (insn))
8178 rtx body;
8180 if (GET_CODE (insn) == CODE_LABEL)
8182 /* Forget all the register values at a code label. We don't
8183 try to do anything clever around jumps. */
8184 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8185 reg_values[i] = 0;
8187 continue;
8190 #ifdef NON_SAVING_SETJMP
8191 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8192 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8194 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8195 reg_values[i] = 0;
8197 continue;
8199 #endif
8201 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8202 continue;
8204 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8206 /* If this is a call instruction, forget anything stored in a
8207 call clobbered register, or, if this is not a const call, in
8208 memory. */
8209 if (GET_CODE (insn) == CALL_INSN)
8211 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8212 if (call_used_regs[i])
8213 reload_cse_invalidate_regno (i, VOIDmode, 1);
8215 if (! CONST_CALL_P (insn))
8216 reload_cse_invalidate_mem (callmem);
8219 body = PATTERN (insn);
8220 if (GET_CODE (body) == SET)
8222 int count = 0;
8223 if (reload_cse_noop_set_p (body, insn))
8225 PUT_CODE (insn, NOTE);
8226 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8227 NOTE_SOURCE_FILE (insn) = 0;
8228 reload_cse_delete_death_notes (insn);
8230 /* We're done with this insn. */
8231 continue;
8234 /* It's not a no-op, but we can try to simplify it. */
8235 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8236 count += reload_cse_simplify_set (body, insn);
8238 if (count > 0 && apply_change_group ())
8239 reload_cse_delete_death_notes (insn);
8240 else if (reload_cse_simplify_operands (insn))
8241 reload_cse_delete_death_notes (insn);
8243 reload_cse_record_set (body, body);
8245 else if (GET_CODE (body) == PARALLEL)
8247 int count = 0;
8249 /* If every action in a PARALLEL is a noop, we can delete
8250 the entire PARALLEL. */
8251 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8252 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
8253 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
8254 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
8255 break;
8256 if (i < 0)
8258 PUT_CODE (insn, NOTE);
8259 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8260 NOTE_SOURCE_FILE (insn) = 0;
8261 reload_cse_delete_death_notes (insn);
8263 /* We're done with this insn. */
8264 continue;
8267 /* It's not a no-op, but we can try to simplify it. */
8268 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8269 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8270 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8271 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8273 if (count > 0 && apply_change_group ())
8274 reload_cse_delete_death_notes (insn);
8275 else if (reload_cse_simplify_operands (insn))
8276 reload_cse_delete_death_notes (insn);
8278 /* Look through the PARALLEL and record the values being
8279 set, if possible. Also handle any CLOBBERs. */
8280 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8282 rtx x = XVECEXP (body, 0, i);
8284 if (GET_CODE (x) == SET)
8285 reload_cse_record_set (x, body);
8286 else
8287 note_stores (x, reload_cse_invalidate_rtx);
8290 else
8291 note_stores (body, reload_cse_invalidate_rtx);
8293 #ifdef AUTO_INC_DEC
8294 /* Clobber any registers which appear in REG_INC notes. We
8295 could keep track of the changes to their values, but it is
8296 unlikely to help. */
8298 rtx x;
8300 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8301 if (REG_NOTE_KIND (x) == REG_INC)
8302 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8304 #endif
8306 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8307 after we have processed the insn. */
8308 if (GET_CODE (insn) == CALL_INSN)
8310 rtx x;
8312 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8313 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8314 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8318 /* Free all the temporary structures we created, and go back to the
8319 regular obstacks. */
8320 obstack_free (&reload_obstack, firstobj);
8321 pop_obstacks ();
8324 /* Return whether the values known for REGNO are equal to VAL. MODE
8325 is the mode of the object that VAL is being copied to; this matters
8326 if VAL is a CONST_INT. */
8328 static int
8329 reload_cse_regno_equal_p (regno, val, mode)
8330 int regno;
8331 rtx val;
8332 enum machine_mode mode;
8334 rtx x;
8336 if (val == 0)
8337 return 0;
8339 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8340 if (XEXP (x, 0) != 0
8341 && rtx_equal_p (XEXP (x, 0), val)
8342 && (GET_CODE (val) != CONST_INT
8343 || mode == GET_MODE (x)
8344 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8345 /* On a big endian machine if the value spans more than
8346 one register then this register holds the high part of
8347 it and we can't use it.
8349 ??? We should also compare with the high part of the
8350 value. */
8351 && !(WORDS_BIG_ENDIAN
8352 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8353 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8354 GET_MODE_BITSIZE (GET_MODE (x))))))
8355 return 1;
8357 return 0;
8360 /* See whether a single set is a noop. SET is the set instruction we
8361 should check, and INSN is the instruction from which it came. */
8363 static int
8364 reload_cse_noop_set_p (set, insn)
8365 rtx set;
8366 rtx insn;
8368 rtx src, dest;
8369 enum machine_mode dest_mode;
8370 int dreg, sreg;
8371 int ret;
8373 src = SET_SRC (set);
8374 dest = SET_DEST (set);
8375 dest_mode = GET_MODE (dest);
8377 if (side_effects_p (src))
8378 return 0;
8380 dreg = true_regnum (dest);
8381 sreg = true_regnum (src);
8383 /* Check for setting a register to itself. In this case, we don't
8384 have to worry about REG_DEAD notes. */
8385 if (dreg >= 0 && dreg == sreg)
8386 return 1;
8388 ret = 0;
8389 if (dreg >= 0)
8391 /* Check for setting a register to itself. */
8392 if (dreg == sreg)
8393 ret = 1;
8395 /* Check for setting a register to a value which we already know
8396 is in the register. */
8397 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8398 ret = 1;
8400 /* Check for setting a register DREG to another register SREG
8401 where SREG is equal to a value which is already in DREG. */
8402 else if (sreg >= 0)
8404 rtx x;
8406 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8408 rtx tmp;
8410 if (XEXP (x, 0) == 0)
8411 continue;
8413 if (dest_mode == GET_MODE (x))
8414 tmp = XEXP (x, 0);
8415 else if (GET_MODE_BITSIZE (dest_mode)
8416 < GET_MODE_BITSIZE (GET_MODE (x)))
8417 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8418 else
8419 continue;
8421 if (tmp
8422 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8424 ret = 1;
8425 break;
8430 else if (GET_CODE (dest) == MEM)
8432 /* Check for storing a register to memory when we know that the
8433 register is equivalent to the memory location. */
8434 if (sreg >= 0
8435 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8436 && ! side_effects_p (dest))
8437 ret = 1;
8440 /* If we can delete this SET, then we need to look for an earlier
8441 REG_DEAD note on DREG, and remove it if it exists. */
8442 if (ret && dreg >= 0)
8444 if (! find_regno_note (insn, REG_UNUSED, dreg))
8445 reload_cse_no_longer_dead (dreg, dest_mode);
8448 return ret;
8451 /* Try to simplify a single SET instruction. SET is the set pattern.
8452 INSN is the instruction it came from.
8453 This function only handles one case: if we set a register to a value
8454 which is not a register, we try to find that value in some other register
8455 and change the set into a register copy. */
8457 static int
8458 reload_cse_simplify_set (set, insn)
8459 rtx set;
8460 rtx insn;
8462 int dreg;
8463 rtx src;
8464 enum machine_mode dest_mode;
8465 enum reg_class dclass;
8466 register int i;
8468 dreg = true_regnum (SET_DEST (set));
8469 if (dreg < 0)
8470 return 0;
8472 src = SET_SRC (set);
8473 if (side_effects_p (src) || true_regnum (src) >= 0)
8474 return 0;
8476 dclass = REGNO_REG_CLASS (dreg);
8478 /* If memory loads are cheaper than register copies, don't change
8479 them. */
8480 if (GET_CODE (src) == MEM
8481 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
8482 return 0;
8484 /* If the constant is cheaper than a register, don't change it. */
8485 if (CONSTANT_P (src)
8486 && rtx_cost (src, SET) < 2)
8487 return 0;
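/* The threshold of 2 used in these cost checks, and in the
REGISTER_MOVE_COST test below, is the cost of a simple
register-register move; a source that is already cheaper than that is
not worth replacing with a copy. */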
8489 dest_mode = GET_MODE (SET_DEST (set));
8490 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8492 if (i != dreg
8493 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8494 && reload_cse_regno_equal_p (i, src, dest_mode))
8496 int validated;
8498 /* Pop back to the real obstacks while changing the insn. */
8499 pop_obstacks ();
8501 validated = validate_change (insn, &SET_SRC (set),
8502 gen_rtx_REG (dest_mode, i), 1);
8504 /* Go back to the obstack we are using for temporary
8505 storage. */
8506 push_obstacks (&reload_obstack, &reload_obstack);
8508 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8510 reload_cse_no_longer_dead (i, dest_mode);
8511 return 1;
8515 return 0;
8518 /* Try to replace operands in INSN with equivalent values that are already
8519 in registers. This can be viewed as optional reloading.
8521 For each non-register operand in the insn, see if any hard regs are
8522 known to be equivalent to that operand. Record the alternatives which
8523 can accept these hard registers. Among all alternatives, select the
8524 ones which are better or equal to the one currently matching, where
8525 "better" is in terms of '?' and '!' constraints. Among the remaining
8526 alternatives, select the one which replaces most operands with
8527 hard registers. */
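/* As the scan below shows, each '?' in an alternative's constraint
adds 3 to its reject value and each '!' adds 300; only alternatives
whose reject value does not exceed that of the currently matching
alternative are considered at all. */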
8529 static int
8530 reload_cse_simplify_operands (insn)
8531 rtx insn;
8533 #ifdef REGISTER_CONSTRAINTS
8534 int insn_code_number, n_operands, n_alternatives;
8535 int i, j;
8537 char *constraints[MAX_RECOG_OPERANDS];
8539 /* Vector recording how bad an alternative is. */
8540 int *alternative_reject;
8541 /* Vector recording how many registers can be introduced by choosing
8542 this alternative. */
8543 int *alternative_nregs;
8544 /* Array of vectors recording, for each operand and each alternative,
8545 which hard register to substitute, or -1 if the operand should be
8546 left as it is. */
8547 int *op_alt_regno[MAX_RECOG_OPERANDS];
8548 /* Array of alternatives, sorted in order of decreasing desirability. */
8549 int *alternative_order;
8550 rtx reg = gen_rtx_REG (VOIDmode, -1);
8552 /* Find out some information about this insn. */
8553 insn_code_number = recog_memoized (insn);
8554 /* We don't modify asm instructions. */
8555 if (insn_code_number < 0)
8556 return 0;
8558 n_operands = insn_n_operands[insn_code_number];
8559 n_alternatives = insn_n_alternatives[insn_code_number];
8561 if (n_alternatives == 0 || n_operands == 0)
8562 return 0;
8563 insn_extract (insn);
8565 /* Figure out which alternative currently matches. */
8566 if (! constrain_operands (insn_code_number, 1))
8567 abort ();
8569 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8570 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8571 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8572 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8573 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8575 for (i = 0; i < n_operands; i++)
8577 enum machine_mode mode;
8578 int regno;
8579 char *p;
8581 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8582 for (j = 0; j < n_alternatives; j++)
8583 op_alt_regno[i][j] = -1;
8585 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8586 mode = insn_operand_mode[insn_code_number][i];
8588 /* Add the reject values for each alternative given by the constraints
8589 for this operand. */
8590 j = 0;
8591 while (*p != '\0')
8593 char c = *p++;
8594 if (c == ',')
8595 j++;
8596 else if (c == '?')
8597 alternative_reject[j] += 3;
8598 else if (c == '!')
8599 alternative_reject[j] += 300;
8602 /* We won't change operands which are already registers. We
8603 also don't want to modify output operands. */
8604 regno = true_regnum (recog_operand[i]);
8605 if (regno >= 0
8606 || constraints[i][0] == '='
8607 || constraints[i][0] == '+')
8608 continue;
8610 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8612 int class = (int) NO_REGS;
8614 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8615 continue;
8617 REGNO (reg) = regno;
8618 PUT_MODE (reg, mode);
8620 /* We found a register equal to this operand. Now look for all
8621 alternatives that can accept this register and have not been
8622 assigned a register they can use yet. */
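/* The scan below walks the constraint string character by character;
',' and the terminating '\0' each end an alternative, and at that
point the code checks whether REGNO fits the register classes
accumulated from the constraint letters seen so far. */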
8623 j = 0;
8624 p = constraints[i];
8625 for (;;)
8627 char c = *p++;
8629 switch (c)
8631 case '=': case '+': case '?':
8632 case '#': case '&': case '!':
8633 case '*': case '%':
8634 case '0': case '1': case '2': case '3': case '4':
8635 case 'm': case '<': case '>': case 'V': case 'o':
8636 case 'E': case 'F': case 'G': case 'H':
8637 case 's': case 'i': case 'n':
8638 case 'I': case 'J': case 'K': case 'L':
8639 case 'M': case 'N': case 'O': case 'P':
8640 #ifdef EXTRA_CONSTRAINT
8641 case 'Q': case 'R': case 'S': case 'T': case 'U':
8642 #endif
8643 case 'p': case 'X':
8644 /* These don't say anything we care about. */
8645 break;
8647 case 'g': case 'r':
8648 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8649 break;
8651 default:
8652 class
8653 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8654 break;
8656 case ',': case '\0':
8657 /* See if REGNO fits this alternative, and set it up as the
8658 replacement register if we don't have one for this
8659 alternative yet and the operand being replaced is not
8660 a cheap CONST_INT. */
8661 if (op_alt_regno[i][j] == -1
8662 && reg_fits_class_p (reg, class, 0, mode)
8663 && (GET_CODE (recog_operand[i]) != CONST_INT
8664 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
8666 alternative_nregs[j]++;
8667 op_alt_regno[i][j] = regno;
8669 j++;
8670 break;
8673 if (c == '\0')
8674 break;
8679 /* Record all alternatives which are better or equal to the currently
8680 matching one in the alternative_order array. */
8681 for (i = j = 0; i < n_alternatives; i++)
8682 if (alternative_reject[i] <= alternative_reject[which_alternative])
8683 alternative_order[j++] = i;
8684 n_alternatives = j;
8686 /* Sort it. Given a small number of alternatives, a dumb algorithm
8687 won't hurt too much. */
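/* The sort below is a simple selection sort: it orders the surviving
alternatives by reject value and breaks ties by the number of register
substitutions each one allows. */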
8688 for (i = 0; i < n_alternatives - 1; i++)
8690 int best = i;
8691 int best_reject = alternative_reject[alternative_order[i]];
8692 int best_nregs = alternative_nregs[alternative_order[i]];
8693 int tmp;
8695 for (j = i + 1; j < n_alternatives; j++)
8697 int this_reject = alternative_reject[alternative_order[j]];
8698 int this_nregs = alternative_nregs[alternative_order[j]];
8700 if (this_reject < best_reject
8701 || (this_reject == best_reject && this_nregs > best_nregs))
8703 best = j;
8704 best_reject = this_reject;
8705 best_nregs = this_nregs;
8709 tmp = alternative_order[best];
8710 alternative_order[best] = alternative_order[i];
8711 alternative_order[i] = tmp;
8714 /* Substitute the operands as determined by op_alt_regno for the best
8715 alternative. */
8716 j = alternative_order[0];
8717 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8719 /* Pop back to the real obstacks while changing the insn. */
8720 pop_obstacks ();
8722 for (i = 0; i < n_operands; i++)
8724 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8725 if (op_alt_regno[i][j] == -1)
8726 continue;
8728 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8729 validate_change (insn, recog_operand_loc[i],
8730 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
8733 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8735 int op = recog_dup_num[i];
8736 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8738 if (op_alt_regno[op][j] == -1)
8739 continue;
8741 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8742 validate_change (insn, recog_dup_loc[i],
8743 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
8746 /* Go back to the obstack we are using for temporary
8747 storage. */
8748 push_obstacks (&reload_obstack, &reload_obstack);
8750 return apply_change_group ();
8751 #else
8752 return 0;
8753 #endif
8756 /* These two variables are used to pass information from
8757 reload_cse_record_set to reload_cse_check_clobber. */
8759 static int reload_cse_check_clobbered;
8760 static rtx reload_cse_check_src;
8762 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8763 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8764 second argument, which is passed by note_stores, is ignored. */
8766 static void
8767 reload_cse_check_clobber (dest, ignore)
8768 rtx dest;
8769 rtx ignore ATTRIBUTE_UNUSED;
8771 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8772 reload_cse_check_clobbered = 1;
8775 /* Record the result of a SET instruction. SET is the set pattern.
8776 BODY is the pattern of the insn that it came from. */
8778 static void
8779 reload_cse_record_set (set, body)
8780 rtx set;
8781 rtx body;
8783 rtx dest, src, x;
8784 int dreg, sreg;
8785 enum machine_mode dest_mode;
8787 dest = SET_DEST (set);
8788 src = SET_SRC (set);
8789 dreg = true_regnum (dest);
8790 sreg = true_regnum (src);
8791 dest_mode = GET_MODE (dest);
8793 /* Some machines don't define AUTO_INC_DEC, but they still use push
8794 instructions. We need to catch that case here in order to
8795 invalidate the stack pointer correctly. Note that invalidating
8796 the stack pointer is different from invalidating DEST. */
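/* e.g. on a target whose stack grows downward, a push looks like
(set (mem:SI (pre_dec:SI (reg:SI 7))) (reg:SI 0)), where reg 7 stands
for the stack pointer; the stack pointer changes even though it is not
the SET_DEST proper. */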
8797 x = dest;
8798 while (GET_CODE (x) == SUBREG
8799 || GET_CODE (x) == ZERO_EXTRACT
8800 || GET_CODE (x) == SIGN_EXTRACT
8801 || GET_CODE (x) == STRICT_LOW_PART)
8802 x = XEXP (x, 0);
8803 if (push_operand (x, GET_MODE (x)))
8805 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8806 reload_cse_invalidate_rtx (dest, NULL_RTX);
8807 return;
8810 /* We can only handle an assignment to a register, or a store of a
8811 register to a memory location. For other cases, we just clobber
8812 the destination. We also have to just clobber if there are side
8813 effects in SRC or DEST. */
8814 if ((dreg < 0 && GET_CODE (dest) != MEM)
8815 || side_effects_p (src)
8816 || side_effects_p (dest))
8818 reload_cse_invalidate_rtx (dest, NULL_RTX);
8819 return;
8822 #ifdef HAVE_cc0
8823 /* We don't try to handle values involving CC, because it's a pain
8824 to keep track of when they have to be invalidated. */
8825 if (reg_mentioned_p (cc0_rtx, src)
8826 || reg_mentioned_p (cc0_rtx, dest))
8828 reload_cse_invalidate_rtx (dest, NULL_RTX);
8829 return;
8831 #endif
8833 /* If BODY is a PARALLEL, then we need to see whether the source of
8834 SET is clobbered by some other instruction in the PARALLEL. */
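/* For instance, in (parallel [(set (reg:SI 0) (reg:SI 1))
(clobber (reg:SI 1))]), with purely illustrative register numbers, the
copy cannot be recorded because reg 1 does not hold the copied value
once the insn completes; the destination is simply invalidated
instead. */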
8835 if (GET_CODE (body) == PARALLEL)
8837 int i;
8839 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8841 rtx x;
8843 x = XVECEXP (body, 0, i);
8844 if (x == set)
8845 continue;
8847 reload_cse_check_clobbered = 0;
8848 reload_cse_check_src = src;
8849 note_stores (x, reload_cse_check_clobber);
8850 if (reload_cse_check_clobbered)
8852 reload_cse_invalidate_rtx (dest, NULL_RTX);
8853 return;
8858 if (dreg >= 0)
8860 int i;
8862 /* This is an assignment to a register. Update the value we
8863 have stored for the register. */
8864 if (sreg >= 0)
8866 rtx x;
8868 /* This is a copy from one register to another. Any values
8869 which were valid for SREG are now valid for DREG. If the
8870 mode changes, we use gen_lowpart_common to extract only
8871 the part of the value that is copied. */
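/* For example, when a copy lands in an HImode register, values
already known in HImode are carried over directly, wider values are
narrowed with gen_lowpart_common when possible, and anything narrower
is dropped. */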
8872 reg_values[dreg] = 0;
8873 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8875 rtx tmp;
8877 if (XEXP (x, 0) == 0)
8878 continue;
8879 if (dest_mode == GET_MODE (XEXP (x, 0)))
8880 tmp = XEXP (x, 0);
8881 else if (GET_MODE_BITSIZE (dest_mode)
8882 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8883 continue;
8884 else
8885 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8886 if (tmp)
8887 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
8888 reg_values[dreg]);
8891 else
8892 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
8894 /* We've changed DREG, so invalidate any values held by other
8895 registers that depend upon it. */
8896 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8898 /* If this assignment changes more than one hard register,
8899 forget anything we know about the others. */
8900 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8901 reg_values[dreg + i] = 0;
8903 else if (GET_CODE (dest) == MEM)
8905 /* Invalidate conflicting memory locations. */
8906 reload_cse_invalidate_mem (dest);
8908 /* If we're storing a register to memory, add DEST to the list
8909 in REG_VALUES. */
8910 if (sreg >= 0 && ! side_effects_p (dest))
8911 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
8912 reg_values[sreg]);
8914 else
8916 /* We should have bailed out earlier. */
8917 abort ();