/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "basic-block.h"
/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
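
/* Illustrative sketch (not part of the compiler): how reg_renumber is
   consulted.  A pseudo register P either received a hard register
   (reg_renumber[P] >= 0) or must live in memory or in a reload register
   (reg_renumber[P] < 0).  The helper name below is hypothetical.  */
#if 0
static int
pseudo_hard_regno (regno)
     int regno;
{
  if (regno < FIRST_PSEUDO_REGISTER)
    return regno;		/* Hard registers map to themselves.  */
  return reg_renumber[regno] >= 0 ? reg_renumber[regno] : -1;
}
#endif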
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;
/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;
/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;
/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];
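
/* Illustrative sketch (not part of the compiler): the invariant relating
   spill_regs and spill_reg_order.  For every valid index I < n_spills,
   spill_reg_order[spill_regs[I]] == I, and any hard register that is not a
   spill register has order -1.  The checking helper below is hypothetical.  */
#if 0
static void
check_spill_reg_order ()
{
  int i;

  for (i = 0; i < n_spills; i++)
    if (spill_reg_order[spill_regs[i]] != i)
      abort ();
}
#endif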
/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];
/* This reg set indicates those registers that have been used as spill
   registers.  This information is used in reorg.c, to help figure out
   what registers are live at any point.  It is assumed that all spill_regs
   are dead at every CODE_LABEL.  */
HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;
/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;
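
/* Illustrative sketch (not part of the compiler): the value of
   spill_indirect_levels can be probed essentially the way init_reload does
   below, by wrapping an address in MEMs until the result is no longer a
   valid memory address.  The helper name is hypothetical.  */
#if 0
static int
count_indirect_levels ()
{
  rtx tem = gen_rtx_MEM (Pmode,
			 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
  int levels = 0;

  while (memory_address_p (QImode, tem))
    {
      levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }
  return levels;
}
#endif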
/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */
char *basic_block_needs[N_REG_CLASSES];
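
/* Illustrative sketch (not part of the compiler): basic_block_needs is
   indexed first by register class and then by basic block number, so the
   test "does block B need a spill register of class C" reads as in the
   hypothetical helper below.  The null check covers the case where we did
   stupid allocation and have no basic block structure.  */
#if 0
static int
block_needs_class_p (class, block)
     int class, block;
{
  return basic_block_needs[class] != 0 && basic_block_needs[class][block];
}
#endif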
/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* The register class to use for a base register when reloading an
   address.  This is normally BASE_REG_CLASS, but it may be different
   when using SMALL_REGISTER_CLASSES and passing parameters in
   registers.  */
enum reg_class reload_address_base_reg_class;

/* The register class to use for an index register when reloading an
   address.  This is normally INDEX_REG_CLASS, but it may be different
   when using SMALL_REGISTER_CLASSES and passing parameters in
   registers.  */
enum reg_class reload_address_index_reg_class;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];
/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
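
/* Illustrative sketch (not part of the compiler): the substitution an
   elim_table entry describes.  A reference to the register being eliminated
   is replaced by the replacement register plus the entry's current offset.
   The helper below is hypothetical and ignores the sharing and validity
   issues that the real eliminate_regs must handle.  */
#if 0
static rtx
apply_elimination (x, ep)
     rtx x;
     struct elim_table *ep;
{
  if (x == ep->from_rtx)
    return plus_constant (ep->to_rtx, ep->offset);
  return x;
}
#endif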
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };
static int calculate_needs_all_insns	PROTO((rtx, int));
static int calculate_needs		PROTO((int, rtx, rtx, int));
static int find_reload_regs		PROTO((int, FILE *));
static int find_tworeg_group		PROTO((int, int, FILE *));
static int find_group			PROTO((int, int, FILE *));
static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *, int));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void order_regs_for_reload	PROTO((void));
static int compare_spill_regs		PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type, int));
static int reload_reg_free_for_value_p	PROTO((int, int, enum reload_type, rtx, rtx, int));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static void reload_cse_invalidate_regno	PROTO((int, enum machine_mode, int));
static int reload_cse_mem_conflict_p	PROTO((rtx, rtx));
static void reload_cse_invalidate_mem	PROTO((rtx));
static void reload_cse_invalidate_rtx	PROTO((rtx, rtx));
static int reload_cse_regno_equal_p	PROTO((int, rtx, enum machine_mode));
static int reload_cse_noop_set_p	PROTO((rtx, rtx));
static int reload_cse_simplify_set	PROTO((rtx, rtx));
static int reload_cse_simplify_operands PROTO((rtx));
static void reload_cse_check_clobber	PROTO((rtx, rtx));
static void reload_cse_record_set	PROTO((rtx, rtx));
static void reload_cse_delete_death_notes	PROTO((rtx));
static void reload_cse_no_longer_dead	PROTO((int, enum machine_mode));
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
				 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }
  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);
  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }
  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
  /* Decide which register class should be used when reloading
     addresses.  If we are using SMALL_REGISTER_CLASSES, and any
     parameters are passed in registers, then we do not want to use
     those registers when reloading an address.  Otherwise, if a
     function argument needs a reload, we may wind up clobbering
     another argument to the function which was already computed.  If
     we find a subset class which simply avoids those registers, we
     use it instead.  ??? It would be better to only use the
     restricted class when we actually are loading function arguments,
     but that is hard to determine.  */
  reload_address_base_reg_class = BASE_REG_CLASS;
  reload_address_index_reg_class = INDEX_REG_CLASS;
  if (SMALL_REGISTER_CLASSES)
    {
      int regno;
      HARD_REG_SET base, index;
      enum reg_class *p;

      COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
      COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    CLEAR_HARD_REG_BIT (base, regno);
	    CLEAR_HARD_REG_BIT (index, regno);
	  }

      GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
			    baseok);
      for (p = reg_class_subclasses[BASE_REG_CLASS];
	   *p != LIM_REG_CLASSES;
	   p++)
	{
	  GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
	  continue;
	usebase:
	  reload_address_base_reg_class = *p;
	  break;
	}
    baseok:;

      GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
			    indexok);
      for (p = reg_class_subclasses[INDEX_REG_CLASS];
	   *p != LIM_REG_CLASSES;
	   p++)
	{
	  GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
	  continue;
	useindex:
	  reload_address_index_reg_class = *p;
	  break;
	}
    indexok:;
    }
}
/* Global variables used by reload and its subroutines.  */

/* Set during calculate_needs if an insn needs reloading.  */
static int something_needs_reloads;
/* Set during calculate_needs if an insn needs register elimination.  */
static int something_needs_elimination;

/* Indicate whether caller saves need a spill register.  */
static enum reg_class caller_save_spill_class = NO_REGS;
static int caller_save_group_size = 1;

/* For each class, number of reload regs needed in that class.
   This is the maximum over all insns of the needs in that class
   of the individual insn.  */
static int max_needs[N_REG_CLASSES];

/* For each class, size of group of consecutive regs
   that is needed for the reloads of this class.  */
static int group_size[N_REG_CLASSES];

/* For each class, max number of consecutive groups needed.
   (Each group contains group_size[CLASS] consecutive registers.)  */
static int max_groups[N_REG_CLASSES];

/* For each class, max number needed of regs that don't belong
   to any of the groups.  */
static int max_nongroups[N_REG_CLASSES];

/* For each class, the machine mode which requires consecutive
   groups of regs of that class.
   If two different modes ever require groups of one class,
   they must be the same size and equally restrictive for that class,
   otherwise we can't handle the complexity.  */
static enum machine_mode group_mode[N_REG_CLASSES];
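
/* Illustrative sketch (not part of the compiler): a "group" for class C
   means group_size[C] consecutive hard registers able to hold a value of
   group_mode[C].  The hypothetical predicate below captures that idea; the
   real code also honors counted_for_groups, counted_for_nongroups and the
   per-block needs when it actually picks registers.  */
#if 0
static int
hard_regs_form_group_p (regno, class)
     int regno, class;
{
  int i;

  for (i = 0; i < group_size[class]; i++)
    if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno + i))
      return 0;
  return HARD_REGNO_MODE_OK (regno, group_mode[class]);
}
#endif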
/* Record the insn where each maximum need is first found.  */
static rtx max_needs_insn[N_REG_CLASSES];
static rtx max_groups_insn[N_REG_CLASSES];
static rtx max_nongroups_insn[N_REG_CLASSES];

/* Nonzero means we couldn't get enough spill regs.  */
static int failure;
/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int i, j;
  register rtx insn;
  register struct elim_table *ep;

  /* The two pointers used to track the true location of the memory used
     for label offsets.  */
  char *real_known_ptr = NULL_PTR;
  int (*real_at_ptr)[NUM_ELIMINABLE_REGS];

  int something_changed;
  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  caller_save_spill_class = NO_REGS;
  caller_save_group_size = 1;

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
  bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();
  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* A function that receives a nonlocal goto must save all call-saved
     registers.  */
  if (current_function_has_nonlocal_label)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! call_used_regs[i] && ! fixed_regs[i])
	regs_ever_live[i] = 1;
  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);
  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);
  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) xmalloc (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) xmalloc (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) xmalloc (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) xmalloc (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) xmalloc (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) xmalloc (max_regno * sizeof (int));
  bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));

  if (SMALL_REGISTER_CLASSES)
    CLEAR_HARD_REG_SET (forbidden_regs);
  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.
     Also look for a "constant" NOTE_INSN_SETJMP.  This means that all
     caller-saved registers must be marked live.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	  if (! call_used_regs[i])
	    regs_ever_live[i] = 1;

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
		  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
	      )
	    {
	      rtx x = XEXP (note, 0);
	      i = REGNO (SET_DEST (set));
	      if (i > LAST_VIRTUAL_REGISTER)
		{
		  if (GET_CODE (x) == MEM)
		    {
		      /* If the operand is a PLUS, the MEM may be shared,
			 so make sure we have an unshared copy here.  */
		      if (GET_CODE (XEXP (x, 0)) == PLUS)
			x = copy_rtx (x);

		      reg_equiv_memory_loc[i] = x;
		    }
		  else if (CONSTANT_P (x))
		    {
		      if (LEGITIMATE_CONSTANT_P (x))
			reg_equiv_constant[i] = x;
		      else
			reg_equiv_memory_loc[i]
			  = force_const_mem (GET_MODE (SET_DEST (set)), x);
		    }
		  else
		    continue;

		  /* If this register is being made equivalent to a MEM
		     and the MEM is not SET_SRC, the equivalencing insn
		     is one with the MEM as a SET_DEST and it occurs later.
		     So don't mark this insn now.  */
		  if (GET_CODE (x) != MEM
		      || rtx_equal_p (SET_SRC (set), x))
		    reg_equiv_init[i] = insn;
		}
	    }
	}

      /* If this insn is setting a MEM from a register equivalent to it,
	 this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
	       && GET_CODE (SET_SRC (set)) == REG
	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
	       && rtx_equal_p (SET_DEST (set),
			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
	reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	scan_paradoxical_subregs (PATTERN (insn));
    }
  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);
  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
	= (CAN_ELIMINATE (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause a core dump.  */
  real_known_ptr = xmalloc (num_labels);
  real_at_ptr
    = (int (*)[NUM_ELIMINABLE_REGS])
      xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at = real_known_ptr - get_first_label_num ();
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);
  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);
#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    {
      free (real_known_ptr);
      free (real_at_ptr);
      free (reg_equiv_constant);
      free (reg_equiv_memory_loc);
      free (reg_equiv_mem);
      free (reg_equiv_init);
      free (reg_equiv_address);
      free (reg_max_ref_width);
      return 0;
    }
#endif
  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload ();

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

  if (! SMALL_REGISTER_CLASSES)
    COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      spill_hard_reg (ep->from, global, dumpfile, 1);

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
#endif
  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
	basic_block_needs[i] = (char *) alloca (n_basic_blocks);
	bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;
  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx x;
      HOST_WIDE_INT starting_frame_size;
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      int previous_frame_pointer_needed = frame_pointer_needed;
#endif
      static char *reg_class_names[] = REG_CLASS_NAMES;
      something_changed = 0;
      bzero ((char *) max_needs, sizeof max_needs);
      bzero ((char *) max_groups, sizeof max_groups);
      bzero ((char *) max_nongroups, sizeof max_nongroups);
      bzero ((char *) max_needs_insn, sizeof max_needs_insn);
      bzero ((char *) max_groups_insn, sizeof max_groups_insn);
      bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
      bzero ((char *) group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
	group_mode[i] = VOIDmode;
      /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done
	 here because the stack size may be a part of the offset computation
	 for register elimination, and there might have been new stack slots
	 created in the last iteration of this loop.  */
      assign_stack_local (BLKmode, 0, 0);

      starting_frame_size = get_frame_size ();
      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
	  ep->previous_offset = ep->offset
	    = ep->max_offset = ep->initial_offset;
	}
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
	abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
	= reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
      /* Set a known offset for each forced label to be at the initial offset
	 of each elimination.  We do this because we assume that all
	 computed jumps occur from a location where each elimination is
	 at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
	if (XEXP (x, 0))
	  set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range, in that case record the address; we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
					 XEXP (x, 0)))
	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (GET_CODE (XEXP (x, 0)) == REG
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc[i] = 0;
		reg_equiv_init[i] = 0;
		alter_reg (i, -1);
		something_changed = 1;
	      }
	  }
      /* If we allocated another pseudo to the stack, redo elimination
	 bookkeeping.  */
      if (something_changed)
	continue;
      /* If caller-saves needs a group, initialize the group to include
	 the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
	{
	  group_mode[(int) caller_save_spill_class] = Pmode;
	  group_size[(int) caller_save_spill_class] = caller_save_group_size;
	}
      something_changed |= calculate_needs_all_insns (first, global);

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
	something_changed = 1;
      if (dumpfile)
	for (i = 0; i < N_REG_CLASSES; i++)
	  {
	    if (max_needs[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d reg%s of class %s (for insn %d).\n",
		       max_needs[i], max_needs[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_needs_insn[i]));
	    if (max_nongroups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
		       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
	    if (max_groups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
		       max_groups[i], max_groups[i] == 1 ? "" : "s",
		       mode_name[(int) group_mode[i]],
		       reg_class_names[i], INSN_UID (max_groups_insn[i]));
	  }
      /* If we have caller-saves, set up the save areas and see if caller-save
	 will need a spill register.  */

      if (caller_save_needed)
	{
	  /* Set the offsets for setup_save_areas.  */
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    ep->previous_offset = ep->max_offset;

	  if ( ! setup_save_areas (&something_changed)
	      && caller_save_spill_class == NO_REGS)
	    {
	      /* The class we will need depends on whether the machine
		 supports the sum of two registers for an address; see
		 find_address_reloads for details.  */

	      caller_save_spill_class
		= double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
	      caller_save_group_size
		= CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
	      something_changed = 1;
	    }
	}
      /* See if anything that happened changes which eliminations are valid.
	 For example, on the Sparc, whether or not the frame pointer can
	 be eliminated can depend on what registers have been used.  We need
	 not check some conditions again (such as flag_omit_frame_pointer)
	 since they can't have changed.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
	    || ! CAN_ELIMINATE (ep->from, ep->to)
#endif
	    )
	  ep->can_eliminate = 0;
      /* Look for the case where we have discovered that we can't replace
	 register A with register B and that means that we will now be
	 trying to replace register A with register C.  This means we can
	 no longer replace register C with register B and we need to disable
	 such an elimination, if it exists.  This occurs often with A == ap,
	 B == sp, and C == fp.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  struct elim_table *op;
	  register int new_to = -1;

	  if (! ep->can_eliminate && ep->can_eliminate_previous)
	    {
	      /* Find the current elimination for ep->from, if there is a
		 new one.  */
	      for (op = reg_eliminate;
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
		if (op->from == ep->from && op->can_eliminate)
		  {
		    new_to = op->to;
		    break;
		  }

	      /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
		 disable it.  */
	      for (op = reg_eliminate;
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
		if (op->from == new_to && op->to == ep->to)
		  op->can_eliminate = 0;
	    }
	}
      /* See if any registers that we thought we could eliminate the previous
	 time are no longer eliminable.  If so, something has changed and we
	 must spill the register.  Also, recompute the number of eliminable
	 registers and see if the frame pointer is needed; it is if there is
	 no elimination of the frame pointer that we can perform.  */

      frame_pointer_needed = 1;
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
	      && ep->to != HARD_FRAME_POINTER_REGNUM)
	    frame_pointer_needed = 0;

	  if (! ep->can_eliminate && ep->can_eliminate_previous)
	    {
	      ep->can_eliminate_previous = 0;
	      spill_hard_reg (ep->from, global, dumpfile, 1);
	      something_changed = 1;
	      num_eliminable--;
	    }
	}
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      /* If we didn't need a frame pointer last time, but we do now, spill
	 the hard frame pointer.  */
      if (frame_pointer_needed && ! previous_frame_pointer_needed)
	{
	  spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
	  something_changed = 1;
	}
#endif
      /* If all needs are met, we win.  */

      for (i = 0; i < N_REG_CLASSES; i++)
	if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
	  break;
      if (i == N_REG_CLASSES && ! something_changed)
	break;
      /* Not all needs are met; must spill some hard regs.  */

      /* Put all registers spilled so far back in potential_reload_regs, but
	 put them at the front, since we've already spilled most of the
	 pseudos in them (we might have left some pseudos unspilled if they
	 were in a block that didn't need any spill registers of a conflicting
	 class.  We used to try to mark off the need for those registers,
	 but doing so properly is very complex and reallocating them is the
	 simpler approach.  First, "pack" potential_reload_regs by pushing
	 any nonnegative entries towards the end.  That will leave room
	 for the registers we already spilled.

	 Also, undo the marking of the spill registers from the last time
	 around in FORBIDDEN_REGS since we will probably be allocating
	 them again.

	 ??? It is theoretically possible that we might end up not using one
	 of our previously-spilled registers in this allocation, even though
	 they are at the head of the list.  It's not clear what to do about
	 this, but it was no better before, when we marked off the needs met
	 by the previously-spilled registers.  With the current code, globals
	 can be allocated into these registers, but locals cannot.  */
      for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	if (potential_reload_regs[i] != -1)
	  potential_reload_regs[j--] = potential_reload_regs[i];

      for (i = 0; i < n_spills; i++)
	{
	  potential_reload_regs[i] = spill_regs[i];
	  spill_reg_order[spill_regs[i]] = -1;
	  CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
	}

      n_spills = 0;

      something_changed |= find_reload_regs (global, dumpfile);
    }
  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
	mark_elimination (ep->from, ep->to);
  /* Insert code to save and restore call-clobbered hard regs
     around calls.  Tell what mode to use so that we will process
     those insns in reload_as_needed if we have to.  */

  if (caller_save_needed)
    save_call_clobbered_regs (num_eliminable ? QImode
			      : caller_save_spill_class != NO_REGS ? HImode
			      : VOIDmode);
1235 If that insn didn't set the register (i.e., it copied the register to
1236 memory), just delete that insn instead of the equivalencing insn plus
1237 anything now dead. If we call delete_dead_insn on that insn, we may
1238 delete the insn that actually sets the register if the register die
1239 there and that is incorrect. */
1241 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1242 if (reg_renumber
[i
] < 0 && reg_equiv_init
[i
] != 0
1243 && GET_CODE (reg_equiv_init
[i
]) != NOTE
)
1245 if (reg_set_p (regno_reg_rtx
[i
], PATTERN (reg_equiv_init
[i
])))
1246 delete_dead_insn (reg_equiv_init
[i
]);
1249 PUT_CODE (reg_equiv_init
[i
], NOTE
);
1250 NOTE_SOURCE_FILE (reg_equiv_init
[i
]) = 0;
1251 NOTE_LINE_NUMBER (reg_equiv_init
[i
]) = NOTE_INSN_DELETED
;
1255 /* Use the reload registers where necessary
1256 by generating move instructions to move the must-be-register
1257 values into or out of the reload registers. */
1259 if (something_needs_reloads
|| something_needs_elimination
1260 || (caller_save_needed
&& num_eliminable
)
1261 || caller_save_spill_class
!= NO_REGS
)
1262 reload_as_needed (first
, global
);
  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    for (i = 0; i < n_basic_blocks; i++)
      CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
			   HARD_FRAME_POINTER_REGNUM);
  /* Come here (with failure set nonzero) if we can't get enough spill regs
     and we decide not to abort about it.  */
 failed:

  reload_in_progress = 0;
  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;
      int in_struct = 0;
      int is_readonly = 0;

      if (reg_equiv_memory_loc[i])
	{
	  in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
	  is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
	}

      if (reg_equiv_mem[i])
	addr = XEXP (reg_equiv_mem[i], 0);

      if (reg_equiv_address[i])
	addr = reg_equiv_address[i];

      if (addr)
	{
	  if (reg_renumber[i] < 0)
	    {
	      rtx reg = regno_reg_rtx[i];
	      XEXP (reg, 0) = addr;
	      REG_USERVAR_P (reg) = 0;
	      RTX_UNCHANGING_P (reg) = is_readonly;
	      MEM_IN_STRUCT_P (reg) = in_struct;
	      /* We have no alias information about this newly created
		 MEM.  */
	      MEM_ALIAS_SET (reg) = 0;
	      PUT_CODE (reg, MEM);
	    }
	  else if (reg_equiv_mem[i])
	    XEXP (reg_equiv_mem[i], 0) = addr;
	}
    }
  /* Make a pass over all the insns and delete all USEs which we inserted
     only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
     is defined, also remove death notes for things that are no longer
     registers or no longer die in the insn (e.g., an input and output
     pseudo being tied).  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
#ifdef PRESERVE_DEATH_INFO_REGNO_P
	rtx note, next;
#endif

	if (GET_CODE (PATTERN (insn)) == USE
	    && find_reg_note (insn, REG_EQUAL, NULL_RTX))
	  {
	    PUT_CODE (insn, NOTE);
	    NOTE_SOURCE_FILE (insn) = 0;
	    NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	    continue;
	  }

#ifdef PRESERVE_DEATH_INFO_REGNO_P
	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_DEAD
		&& (GET_CODE (XEXP (note, 0)) != REG
		    || reg_set_p (XEXP (note, 0), PATTERN (insn))))
	      remove_note (insn, note);
	  }
#endif
      }
  /* If we are doing stack checking, give a warning if this function's
     frame size is larger than we expect.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
	  size += UNITS_PER_WORD;

      if (size > STACK_CHECK_MAX_FRAME_SIZE)
	warning ("frame size too large for reliable stack checking");
    }
  /* Indicate that we no longer have known memory locations or constants.  */
  reg_equiv_constant = 0;
  reg_equiv_memory_loc = 0;

  if (real_known_ptr)
    free (real_known_ptr);
  if (scratch_list)
    free (scratch_list);
  if (scratch_block)
    free (scratch_block);

  free (reg_equiv_constant);
  free (reg_equiv_memory_loc);
  free (reg_equiv_mem);
  free (reg_equiv_init);
  free (reg_equiv_address);
  free (reg_max_ref_width);

  CLEAR_HARD_REG_SET (used_spill_regs);
  for (i = 0; i < n_spills; i++)
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);

  return failure;
}
/* Walk the insns of the current function, starting with FIRST, and collect
   information about the need to do register elimination and the need to
   perform reloads.  */

static int
calculate_needs_all_insns (first, global)
     rtx first;
     int global;
{
  rtx insn;
  rtx after_call = 0;
  int something_changed = 0;
  /* Keep track of which basic blocks are needing the reloads.  */
  int this_block = 0;

  /* Compute the most additional registers needed by any instruction.
     Collect information separately for each class of regs.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (global && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
	 might include REG_LABEL), we need to see what effects this
	 has on the known offsets at labels.  */

      if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
	  || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	      && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;

	  /* Nonzero means don't use a reload reg that overlaps
	     the place where a function value can be returned.  */
	  rtx avoid_return_reg = 0;

	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (SMALL_REGISTER_CLASSES && after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
		   && GET_CODE (PATTERN (insn)) != USE)
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  find_reloads (insn, 0, spill_indirect_levels, global,
			spill_reg_order);

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.

	     One might think that it would be worthwhile to mark insns
	     that need register replacements but not reloads, but this is
	     not safe because find_reloads may do some manipulation of
	     the insn (such as swapping commutative operands), which would
	     be lost when we restore the old pattern after register
	     replacement.  So the actions of find_reloads must be redone in
	     subsequent passes or in reload_as_needed.

	     However, it is safe to mark insns that need reloads
	     but not register replacement.  */

	  PUT_MODE (insn, (did_elimination ? QImode
			   : n_reloads ? HImode
			   : GET_MODE (insn) == DImode ? DImode
			   : VOIDmode));

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  /* If this insn has no reloads, we need not do anything except
	     in the case of a CALL_INSN when we have caller-saves and
	     caller-save needs reloads.  */

	  if (n_reloads != 0
	      || (GET_CODE (insn) == CALL_INSN
		  && caller_save_spill_class != NO_REGS))
	    something_changed |= calculate_needs (this_block, insn,
						  avoid_return_reg, global);
	}

      /* Note that there is a continue statement above.  */
    }

  return something_changed;
}
/* To compute the number of reload registers of each class
   needed for an insn, we must simulate what choose_reload_regs
   can do.  We do this by splitting an insn into an "input" and
   an "output" part.  RELOAD_OTHER reloads are used in both.
   The input part uses those reloads, RELOAD_FOR_INPUT reloads,
   which must be live over the entire input section of reloads,
   and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
   RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
   inputs.

   The registers needed for output are RELOAD_OTHER and
   RELOAD_FOR_OUTPUT, which are live for the entire output
   portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
   reloads for each operand.

   The total number of registers needed is the maximum of the
   inputs and outputs.  */
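
/* Illustrative sketch (not part of the compiler): the merging rule the
   comment above describes, reduced to a single register class and ignoring
   groups and the per-operand address reloads.  The names below are
   hypothetical; the real computation in calculate_needs folds in much more.  */
#if 0
static int
regs_needed_for_class (other, input, output)
     int other, input, output;	/* RELOAD_OTHER / input part / output part */
{
  int in = other + input;	/* live across the whole input section */
  int out = other + output;	/* live across the whole output section */

  return MAX (in, out);		/* the two sections do not overlap */
}
#endif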
static int
calculate_needs (this_block, insn, avoid_return_reg, global)
     int this_block;
     rtx insn, avoid_return_reg;
     int global;
{
  int i;
  int something_changed = 0;

  struct needs
    {
      /* [0] is normal, [1] is nongroup.  */
      int regs[2][N_REG_CLASSES];
      int groups[N_REG_CLASSES];
    };

  /* Each `struct needs' corresponds to one RELOAD_... type.  */
  struct
    {
      struct needs other;
      struct needs input;
      struct needs output;
      struct needs insn;
      struct needs other_addr;
      struct needs op_addr;
      struct needs op_addr_reload;
      struct needs in_addr[MAX_RECOG_OPERANDS];
      struct needs in_addr_addr[MAX_RECOG_OPERANDS];
      struct needs out_addr[MAX_RECOG_OPERANDS];
      struct needs out_addr_addr[MAX_RECOG_OPERANDS];
    } insn_needs;

  something_needs_reloads = 1;
  bzero ((char *) &insn_needs, sizeof insn_needs);
1575 /* Count each reload once in every class
1576 containing the reload's own class. */
1578 for (i
= 0; i
< n_reloads
; i
++)
1580 register enum reg_class
*p
;
1581 enum reg_class
class = reload_reg_class
[i
];
1583 enum machine_mode mode
;
1584 struct needs
*this_needs
;
1586 /* Don't count the dummy reloads, for which one of the
1587 regs mentioned in the insn can be used for reloading.
1588 Don't count optional reloads.
1589 Don't count reloads that got combined with others. */
1590 if (reload_reg_rtx
[i
] != 0
1591 || reload_optional
[i
] != 0
1592 || (reload_out
[i
] == 0 && reload_in
[i
] == 0
1593 && ! reload_secondary_p
[i
]))
      /* Show that a reload register of this class is needed
	 in this basic block.  We do not use insn_needs and
	 insn_groups because they are overly conservative for
	 this purpose.  */
      if (global && ! basic_block_needs[(int) class][this_block])
	{
	  basic_block_needs[(int) class][this_block] = 1;
	  something_changed = 1;
	}
      mode = reload_inmode[i];
      if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
	mode = reload_outmode[i];
      size = CLASS_MAX_NREGS (class, mode);
      /* Decide which time-of-use to count this reload for.  */
      switch (reload_when_needed[i])
	{
	case RELOAD_OTHER:
	  this_needs = &insn_needs.other;
	  break;
	case RELOAD_FOR_INPUT:
	  this_needs = &insn_needs.input;
	  break;
	case RELOAD_FOR_OUTPUT:
	  this_needs = &insn_needs.output;
	  break;
	case RELOAD_FOR_INSN:
	  this_needs = &insn_needs.insn;
	  break;
	case RELOAD_FOR_OTHER_ADDRESS:
	  this_needs = &insn_needs.other_addr;
	  break;
	case RELOAD_FOR_INPUT_ADDRESS:
	  this_needs = &insn_needs.in_addr[reload_opnum[i]];
	  break;
	case RELOAD_FOR_INPADDR_ADDRESS:
	  this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
	  break;
	case RELOAD_FOR_OUTPUT_ADDRESS:
	  this_needs = &insn_needs.out_addr[reload_opnum[i]];
	  break;
	case RELOAD_FOR_OUTADDR_ADDRESS:
	  this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
	  break;
	case RELOAD_FOR_OPERAND_ADDRESS:
	  this_needs = &insn_needs.op_addr;
	  break;
	case RELOAD_FOR_OPADDR_ADDR:
	  this_needs = &insn_needs.op_addr_reload;
	  break;
	}

      if (size > 1)
	{
	  enum machine_mode other_mode, allocate_mode;

	  /* Count number of groups needed separately from
	     number of individual regs needed.  */
	  this_needs->groups[(int) class]++;
	  p = reg_class_superclasses[(int) class];
	  while (*p != LIM_REG_CLASSES)
	    this_needs->groups[(int) *p++]++;

	  /* Record size and mode of a group of this class.  */
	  /* If more than one size group is needed,
	     make all groups the largest needed size.  */
	  if (group_size[(int) class] < size)
	    {
	      other_mode = group_mode[(int) class];
	      allocate_mode = mode;

	      group_size[(int) class] = size;
	      group_mode[(int) class] = mode;
	    }
	  else
	    {
	      other_mode = mode;
	      allocate_mode = group_mode[(int) class];
	    }

	  /* Crash if two dissimilar machine modes both need
	     groups of consecutive regs of the same class.  */

	  if (other_mode != VOIDmode && other_mode != allocate_mode
	      && ! modes_equiv_for_class_p (allocate_mode,
					    other_mode, class))
	    fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
			insn);
	}
      else if (size == 1)
	{
	  this_needs->regs[reload_nongroup[i]][(int) class] += 1;
	  p = reg_class_superclasses[(int) class];
	  while (*p != LIM_REG_CLASSES)
	    this_needs->regs[reload_nongroup[i]][(int) *p++] += 1;
	}
      else
	abort ();
    }
  /* All reloads have been counted for this insn;
     now merge the various times of use.
     This sets insn_needs, etc., to the maximum total number
     of registers needed at any point in this insn.  */
1702 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1704 int j
, in_max
, out_max
;
1706 /* Compute normal and nongroup needs. */
1707 for (j
= 0; j
<= 1; j
++)
1710 for (in_max
= 0, out_max
= 0, k
= 0; k
< reload_n_operands
; k
++)
1712 in_max
= MAX (in_max
,
1713 (insn_needs
.in_addr
[k
].regs
[j
][i
]
1714 + insn_needs
.in_addr_addr
[k
].regs
[j
][i
]));
1715 out_max
= MAX (out_max
, insn_needs
.out_addr
[k
].regs
[j
][i
]);
1716 out_max
= MAX (out_max
,
1717 insn_needs
.out_addr_addr
[k
].regs
[j
][i
]);
	  /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
	     and operand addresses but not things used to reload
	     them.  Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
	     don't conflict with things needed to reload inputs or
	     outputs.  */
1726 in_max
= MAX (MAX (insn_needs
.op_addr
.regs
[j
][i
],
1727 insn_needs
.op_addr_reload
.regs
[j
][i
]),
1730 out_max
= MAX (out_max
, insn_needs
.insn
.regs
[j
][i
]);
1732 insn_needs
.input
.regs
[j
][i
]
1733 = MAX (insn_needs
.input
.regs
[j
][i
]
1734 + insn_needs
.op_addr
.regs
[j
][i
]
1735 + insn_needs
.insn
.regs
[j
][i
],
1736 in_max
+ insn_needs
.input
.regs
[j
][i
]);
1738 insn_needs
.output
.regs
[j
][i
] += out_max
;
1739 insn_needs
.other
.regs
[j
][i
]
1740 += MAX (MAX (insn_needs
.input
.regs
[j
][i
],
1741 insn_needs
.output
.regs
[j
][i
]),
1742 insn_needs
.other_addr
.regs
[j
][i
]);
1746 /* Now compute group needs. */
1747 for (in_max
= 0, out_max
= 0, j
= 0; j
< reload_n_operands
; j
++)
1749 in_max
= MAX (in_max
, insn_needs
.in_addr
[j
].groups
[i
]);
1750 in_max
= MAX (in_max
, insn_needs
.in_addr_addr
[j
].groups
[i
]);
1751 out_max
= MAX (out_max
, insn_needs
.out_addr
[j
].groups
[i
]);
1752 out_max
= MAX (out_max
, insn_needs
.out_addr_addr
[j
].groups
[i
]);
1755 in_max
= MAX (MAX (insn_needs
.op_addr
.groups
[i
],
1756 insn_needs
.op_addr_reload
.groups
[i
]),
1758 out_max
= MAX (out_max
, insn_needs
.insn
.groups
[i
]);
1760 insn_needs
.input
.groups
[i
]
1761 = MAX (insn_needs
.input
.groups
[i
]
1762 + insn_needs
.op_addr
.groups
[i
]
1763 + insn_needs
.insn
.groups
[i
],
1764 in_max
+ insn_needs
.input
.groups
[i
]);
1766 insn_needs
.output
.groups
[i
] += out_max
;
1767 insn_needs
.other
.groups
[i
]
1768 += MAX (MAX (insn_needs
.input
.groups
[i
],
1769 insn_needs
.output
.groups
[i
]),
1770 insn_needs
.other_addr
.groups
[i
]);
  /* If this is a CALL_INSN and caller-saves will need
     a spill register, act as if the spill register is
     needed for this insn.  However, the spill register
     can be used by any reload of this insn, so we only
     need do something if no need for that class has
     been recorded.

     The assumption that every CALL_INSN will trigger a
     caller-save is highly conservative; however, the number
     of cases where caller-saves will need a spill register but
     a block containing a CALL_INSN won't need a spill register
     of that class should be quite rare.

     If a group is needed, the size and mode of the group will
     have been set up at the beginning of this loop.  */
1789 if (GET_CODE (insn
) == CALL_INSN
1790 && caller_save_spill_class
!= NO_REGS
)
1793 /* See if this register would conflict with any reload that
1794 needs a group or any reload that needs a nongroup. */
1795 int nongroup_need
= 0;
1796 int *caller_save_needs
;
1798 for (j
= 0; j
< n_reloads
; j
++)
1799 if (reg_classes_intersect_p (caller_save_spill_class
,
1800 reload_reg_class
[j
])
1801 && ((CLASS_MAX_NREGS
1802 (reload_reg_class
[j
],
1803 (GET_MODE_SIZE (reload_outmode
[j
])
1804 > GET_MODE_SIZE (reload_inmode
[j
]))
1805 ? reload_outmode
[j
] : reload_inmode
[j
])
1807 || reload_nongroup
[j
]))
1814 = (caller_save_group_size
> 1
1815 ? insn_needs
.other
.groups
1816 : insn_needs
.other
.regs
[nongroup_need
]);
1818 if (caller_save_needs
[(int) caller_save_spill_class
] == 0)
1820 register enum reg_class
*p
1821 = reg_class_superclasses
[(int) caller_save_spill_class
];
1823 caller_save_needs
[(int) caller_save_spill_class
]++;
1825 while (*p
!= LIM_REG_CLASSES
)
1826 caller_save_needs
[(int) *p
++] += 1;
1829 /* Show that this basic block will need a register of
1833 && ! (basic_block_needs
[(int) caller_save_spill_class
]
1836 basic_block_needs
[(int) caller_save_spill_class
]
1838 something_changed
= 1;
  /* If this insn stores the value of a function call,
     and that value is in a register that has been spilled,
     and if the insn needs a reload in a class
     that might use that register as the reload register,
     then add an extra need in that class.
     This makes sure we have a register available that does
     not overlap the return value.  */
1850 if (SMALL_REGISTER_CLASSES
&& avoid_return_reg
)
1852 int regno
= REGNO (avoid_return_reg
);
1854 = HARD_REGNO_NREGS (regno
, GET_MODE (avoid_return_reg
));
1856 int basic_needs
[N_REG_CLASSES
], basic_groups
[N_REG_CLASSES
];
      /* First compute the "basic needs", which counts a
	 need only in the smallest class in which it
	 is required.  */

      bcopy ((char *) insn_needs.other.regs[0],
	     (char *) basic_needs, sizeof basic_needs);
      bcopy ((char *) insn_needs.other.groups,
	     (char *) basic_groups, sizeof basic_groups);
1867 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1871 if (basic_needs
[i
] >= 0)
1872 for (p
= reg_class_superclasses
[i
];
1873 *p
!= LIM_REG_CLASSES
; p
++)
1874 basic_needs
[(int) *p
] -= basic_needs
[i
];
1876 if (basic_groups
[i
] >= 0)
1877 for (p
= reg_class_superclasses
[i
];
1878 *p
!= LIM_REG_CLASSES
; p
++)
1879 basic_groups
[(int) *p
] -= basic_groups
[i
];
      /* Now count extra regs if there might be a conflict with
	 the return value register.  */
1885 for (r
= regno
; r
< regno
+ nregs
; r
++)
1886 if (spill_reg_order
[r
] >= 0)
1887 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1888 if (TEST_HARD_REG_BIT (reg_class_contents
[i
], r
))
1890 if (basic_needs
[i
] > 0)
1894 insn_needs
.other
.regs
[0][i
]++;
1895 p
= reg_class_superclasses
[i
];
1896 while (*p
!= LIM_REG_CLASSES
)
1897 insn_needs
.other
.regs
[0][(int) *p
++]++;
1899 if (basic_groups
[i
] > 0)
1903 insn_needs
.other
.groups
[i
]++;
1904 p
= reg_class_superclasses
[i
];
1905 while (*p
!= LIM_REG_CLASSES
)
1906 insn_needs
.other
.groups
[(int) *p
++]++;
  /* For each class, collect maximum need of any insn.  */
  for (i = 0; i < N_REG_CLASSES; i++)
    {
      if (max_needs[i] < insn_needs.other.regs[0][i])
	{
	  max_needs[i] = insn_needs.other.regs[0][i];
	  max_needs_insn[i] = insn;
	}
      if (max_groups[i] < insn_needs.other.groups[i])
	{
	  max_groups[i] = insn_needs.other.groups[i];
	  max_groups_insn[i] = insn;
	}
      if (max_nongroups[i] < insn_needs.other.regs[1][i])
	{
	  max_nongroups[i] = insn_needs.other.regs[1][i];
	  max_nongroups_insn[i] = insn;
	}
    }

  return something_changed;
}
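
/* Illustrative sketch, not part of the original sources: the per-class
   bookkeeping performed by the loop above, on a plain array.  Each insn's
   needs only raise the running maximum; they are never summed across
   insns, because the same spill registers are reused from insn to insn.
   The function and parameter names are hypothetical.  */

static void
example_collect_max (max_needs, this_insn_needs, n_classes)
     int *max_needs, *this_insn_needs;
     int n_classes;
{
  int i;

  for (i = 0; i < n_classes; i++)
    if (max_needs[i] < this_insn_needs[i])
      max_needs[i] = this_insn_needs[i];
}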
/* Find a group of exactly 2 registers.

   First try to fill out the group by spilling a single register which
   would allow completion of the group.

   Then try to create a new group from a pair of registers, neither of
   which are explicitly used.

   Then try to create a group from any pair of registers.  */
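
/* Illustrative sketch, not part of the original sources: the first
   strategy above, reduced to its core.  CHOSEN is a hypothetical 0/1
   array marking hard regs already taken as spill regs; the function
   returns a reg whose spilling would complete an adjacent pair, or -1
   if no single reg can complete a group.  The real code additionally
   checks class membership, HARD_REGNO_MODE_OK, bad_spill_regs and the
   counted_for_* sets.  */

static int
example_complete_pair (chosen, n_regs)
     char *chosen;
     int n_regs;
{
  int j;

  for (j = 0; j < n_regs; j++)
    if (! chosen[j]
	&& ((j > 0 && chosen[j - 1])
	    || (j + 1 < n_regs && chosen[j + 1])))
      return j;
  return -1;
}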
1944 find_tworeg_group (global
, class, dumpfile
)
1950 /* First, look for a register that will complete a group. */
1951 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1955 j
= potential_reload_regs
[i
];
1956 if (j
>= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
)
1957 && ((j
> 0 && (other
= j
- 1, spill_reg_order
[other
] >= 0)
1958 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1959 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1960 && HARD_REGNO_MODE_OK (other
, group_mode
[class])
1961 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, other
)
1962 /* We don't want one part of another group.
1963 We could get "two groups" that overlap! */
1964 && ! TEST_HARD_REG_BIT (counted_for_groups
, other
))
1965 || (j
< FIRST_PSEUDO_REGISTER
- 1
1966 && (other
= j
+ 1, spill_reg_order
[other
] >= 0)
1967 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1968 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1969 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
1970 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, other
)
1971 && ! TEST_HARD_REG_BIT (counted_for_groups
, other
))))
1973 register enum reg_class
*p
;
1975 /* We have found one that will complete a group,
1976 so count off one group as provided. */
1977 max_groups
[class]--;
1978 p
= reg_class_superclasses
[class];
1979 while (*p
!= LIM_REG_CLASSES
)
1981 if (group_size
[(int) *p
] <= group_size
[class])
1982 max_groups
[(int) *p
]--;
1986 /* Indicate both these regs are part of a group. */
1987 SET_HARD_REG_BIT (counted_for_groups
, j
);
1988 SET_HARD_REG_BIT (counted_for_groups
, other
);
1992 /* We can't complete a group, so start one. */
1993 /* Look for a pair neither of which is explicitly used. */
1994 if (SMALL_REGISTER_CLASSES
&& i
== FIRST_PSEUDO_REGISTER
)
1995 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1998 j
= potential_reload_regs
[i
];
1999 /* Verify that J+1 is a potential reload reg. */
2000 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
2001 if (potential_reload_regs
[k
] == j
+ 1)
2003 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
2004 && k
< FIRST_PSEUDO_REGISTER
2005 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
2006 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
2007 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
2008 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
2009 && ! TEST_HARD_REG_BIT (counted_for_nongroups
,
2011 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1)
2012 /* Reject J at this stage
2013 if J+1 was explicitly used. */
2014 && ! regs_explicitly_used
[j
+ 1])
2017 /* Now try any group at all
2018 whose registers are not in bad_spill_regs. */
2019 if (i
== FIRST_PSEUDO_REGISTER
)
2020 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2023 j
= potential_reload_regs
[i
];
2024 /* Verify that J+1 is a potential reload reg. */
2025 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
2026 if (potential_reload_regs
[k
] == j
+ 1)
2028 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
2029 && k
< FIRST_PSEUDO_REGISTER
2030 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
2031 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
2032 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
2033 && HARD_REGNO_MODE_OK (j
, group_mode
[class])
2034 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, j
+ 1)
2035 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1))
2039 /* I should be the index in potential_reload_regs
2040 of the new reload reg we have found. */
2042 if (i
< FIRST_PSEUDO_REGISTER
)
2043 return new_spill_reg (i
, class, max_needs
, NULL_PTR
,
2046 /* There are no groups left to spill. */
2047 spill_failure (max_groups_insn
[class]);
/* Find a group of more than 2 registers.
   Look for a sufficient sequence of unspilled registers, and spill them all
   at once.  */
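
/* Illustrative sketch, not part of the original sources: the scan
   performed below, on a hypothetical ELIGIBLE array (1 if the hard reg
   may still be spilled for this class).  It returns the first reg of a
   run of GROUP_SIZE consecutive eligible regs, or -1 if no such run
   exists.  */

static int
example_find_run (eligible, n_regs, group_size)
     char *eligible;
     int n_regs, group_size;
{
  int j, len;

  for (j = 0, len = 0; j < n_regs; j++)
    {
      /* Length of the run of eligible regs ending at J.  */
      len = eligible[j] ? len + 1 : 0;
      if (len == group_size)
	return j - group_size + 1;
    }
  return -1;
}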
2056 find_group (global
, class, dumpfile
)
2061 int something_changed
= 0;
2064 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2068 j
= potential_reload_regs
[i
];
2070 && j
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
2071 && HARD_REGNO_MODE_OK (j
, group_mode
[class]))
2073 /* Check each reg in the sequence. */
2074 for (k
= 0; k
< group_size
[class]; k
++)
2075 if (! (spill_reg_order
[j
+ k
] < 0
2076 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ k
)
2077 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ k
)))
2079 /* We got a full sequence, so spill them all. */
2080 if (k
== group_size
[class])
2082 register enum reg_class
*p
;
2083 for (k
= 0; k
< group_size
[class]; k
++)
2086 SET_HARD_REG_BIT (counted_for_groups
, j
+ k
);
2087 for (idx
= 0; idx
< FIRST_PSEUDO_REGISTER
; idx
++)
2088 if (potential_reload_regs
[idx
] == j
+ k
)
2090 something_changed
|= new_spill_reg (idx
, class, max_needs
,
2095 /* We have found one that will complete a group,
2096 so count off one group as provided. */
2097 max_groups
[class]--;
2098 p
= reg_class_superclasses
[class];
2099 while (*p
!= LIM_REG_CLASSES
)
2101 if (group_size
[(int) *p
]
2102 <= group_size
[class])
2103 max_groups
[(int) *p
]--;
2106 return something_changed
;
2110 /* There are no groups left. */
2111 spill_failure (max_groups_insn
[class]);
/* Find more reload regs to satisfy the remaining need.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.

   Count spilled regs in `spills', and add entries to
   `spill_regs' and `spill_reg_order'.

   ??? Note there is a problem here.
   When there is a need for a group in a high-numbered class,
   and also need for non-group regs that come from a lower class,
   the non-group regs are chosen first.  If there aren't many regs,
   they might leave no room for a group.

   This was happening on the 386.  To fix it, we added the code
   that calls possible_group_p, so that the lower class won't
   break up the last possible group.

   Really fixing the problem would require changes above
   in counting the regs already spilled, and in choose_reload_regs.
   It might be hard to avoid introducing bugs there.  */
2139 find_reload_regs (global
, dumpfile
)
2144 int something_changed
= 0;
2146 CLEAR_HARD_REG_SET (counted_for_groups
);
2147 CLEAR_HARD_REG_SET (counted_for_nongroups
);
2149 for (class = 0; class < N_REG_CLASSES
; class++)
2151 /* First get the groups of registers.
2152 If we got single registers first, we might fragment
2154 while (max_groups
[class] > 0)
2156 /* If any single spilled regs happen to form groups,
2157 count them now. Maybe we don't really need
2158 to spill another group. */
2159 count_possible_groups (group_size
, group_mode
, max_groups
, class);
2161 if (max_groups
[class] <= 0)
2164 /* Groups of size 2 (the only groups used on most machines)
2165 are treated specially. */
2166 if (group_size
[class] == 2)
2167 something_changed
|= find_tworeg_group (global
, class, dumpfile
);
2169 something_changed
|= find_group (global
, class, dumpfile
);
2175 /* Now similarly satisfy all need for single registers. */
2177 while (max_needs
[class] > 0 || max_nongroups
[class] > 0)
2180 /* If we spilled enough regs, but they weren't counted
2181 against the non-group need, see if we can count them now.
2182 If so, we can avoid some actual spilling. */
2183 if (max_needs
[class] <= 0 && max_nongroups
[class] > 0)
2184 for (i
= 0; i
< n_spills
; i
++)
2186 int regno
= spill_regs
[i
];
2187 if (TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
2188 && !TEST_HARD_REG_BIT (counted_for_groups
, regno
)
2189 && !TEST_HARD_REG_BIT (counted_for_nongroups
, regno
)
2190 && max_nongroups
[class] > 0)
2192 register enum reg_class
*p
;
2194 SET_HARD_REG_BIT (counted_for_nongroups
, regno
);
2195 max_nongroups
[class]--;
2196 p
= reg_class_superclasses
[class];
2197 while (*p
!= LIM_REG_CLASSES
)
2198 max_nongroups
[(int) *p
++]--;
2201 if (max_needs
[class] <= 0 && max_nongroups
[class] <= 0)
2204 /* Consider the potential reload regs that aren't
2205 yet in use as reload regs, in order of preference.
2206 Find the most preferred one that's in this class. */
2208 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2210 int regno
= potential_reload_regs
[i
];
2212 && TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
2213 /* If this reg will not be available for groups,
2214 pick one that does not foreclose possible groups.
2215 This is a kludge, and not very general,
2216 but it should be sufficient to make the 386 work,
2217 and the problem should not occur on machines with
2219 && (max_nongroups
[class] == 0
2220 || possible_group_p (regno
, max_groups
)))
2224 /* If we couldn't get a register, try to get one even if we
2225 might foreclose possible groups. This may cause problems
2226 later, but that's better than aborting now, since it is
2227 possible that we will, in fact, be able to form the needed
2228 group even with this allocation. */
2230 if (i
>= FIRST_PSEUDO_REGISTER
2231 && (asm_noperands (max_needs
[class] > 0
2232 ? max_needs_insn
[class]
2233 : max_nongroups_insn
[class])
2235 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2236 if (potential_reload_regs
[i
] >= 0
2237 && TEST_HARD_REG_BIT (reg_class_contents
[class],
2238 potential_reload_regs
[i
]))
2241 /* I should be the index in potential_reload_regs
2242 of the new reload reg we have found. */
2244 if (i
>= FIRST_PSEUDO_REGISTER
)
2246 /* There are no possible registers left to spill. */
2247 spill_failure (max_needs
[class] > 0 ? max_needs_insn
[class]
2248 : max_nongroups_insn
[class]);
2253 something_changed
|= new_spill_reg (i
, class, max_needs
,
2254 max_nongroups
, global
,
2258 return something_changed
;
/* Nonzero if, after spilling reg REGNO for non-groups,
   it will still be possible to find a group if we still need one.  */
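
/* Illustrative sketch, not part of the original sources: the essence of
   the test below.  USABLE is a hypothetical 0/1 array marking hard regs
   that could still be used to form a group; the function asks whether an
   adjacent pair remains once REGNO is taken for a non-group need.  */

static int
example_pair_still_possible (usable, n_regs, regno)
     char *usable;
     int n_regs, regno;
{
  int i;

  for (i = 0; i + 1 < n_regs; i++)
    if (i != regno && i + 1 != regno && usable[i] && usable[i + 1])
      return 1;
  return 0;
}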
2266 possible_group_p (regno
, max_groups
)
2271 int class = (int) NO_REGS
;
2273 for (i
= 0; i
< (int) N_REG_CLASSES
; i
++)
2274 if (max_groups
[i
] > 0)
2280 if (class == (int) NO_REGS
)
2283 /* Consider each pair of consecutive registers. */
2284 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
- 1; i
++)
2286 /* Ignore pairs that include reg REGNO. */
2287 if (i
== regno
|| i
+ 1 == regno
)
2290 /* Ignore pairs that are outside the class that needs the group.
2291 ??? Here we fail to handle the case where two different classes
2292 independently need groups. But this never happens with our
2293 current machine descriptions. */
2294 if (! (TEST_HARD_REG_BIT (reg_class_contents
[class], i
)
2295 && TEST_HARD_REG_BIT (reg_class_contents
[class], i
+ 1)))
2298 /* A pair of consecutive regs we can still spill does the trick. */
2299 if (spill_reg_order
[i
] < 0 && spill_reg_order
[i
+ 1] < 0
2300 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2301 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1))
2304 /* A pair of one already spilled and one we can spill does it
2305 provided the one already spilled is not otherwise reserved. */
2306 if (spill_reg_order
[i
] < 0
2307 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2308 && spill_reg_order
[i
+ 1] >= 0
2309 && ! TEST_HARD_REG_BIT (counted_for_groups
, i
+ 1)
2310 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, i
+ 1))
2312 if (spill_reg_order
[i
+ 1] < 0
2313 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1)
2314 && spill_reg_order
[i
] >= 0
2315 && ! TEST_HARD_REG_BIT (counted_for_groups
, i
)
2316 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, i
))
/* Count any groups of CLASS that can be formed from the registers recently
   spilled.  */
2327 count_possible_groups (group_size
, group_mode
, max_groups
, class)
2329 enum machine_mode
*group_mode
;
2336 /* Now find all consecutive groups of spilled registers
2337 and mark each group off against the need for such groups.
2338 But don't count them against ordinary need, yet. */
2340 if (group_size
[class] == 0)
2343 CLEAR_HARD_REG_SET (new);
2345 /* Make a mask of all the regs that are spill regs in class I. */
2346 for (i
= 0; i
< n_spills
; i
++)
2347 if (TEST_HARD_REG_BIT (reg_class_contents
[class], spill_regs
[i
])
2348 && ! TEST_HARD_REG_BIT (counted_for_groups
, spill_regs
[i
])
2349 && ! TEST_HARD_REG_BIT (counted_for_nongroups
, spill_regs
[i
]))
2350 SET_HARD_REG_BIT (new, spill_regs
[i
]);
2352 /* Find each consecutive group of them. */
2353 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
&& max_groups
[class] > 0; i
++)
2354 if (TEST_HARD_REG_BIT (new, i
)
2355 && i
+ group_size
[class] <= FIRST_PSEUDO_REGISTER
2356 && HARD_REGNO_MODE_OK (i
, group_mode
[class]))
2358 for (j
= 1; j
< group_size
[class]; j
++)
2359 if (! TEST_HARD_REG_BIT (new, i
+ j
))
2362 if (j
== group_size
[class])
2364 /* We found a group. Mark it off against this class's need for
2365 groups, and against each superclass too. */
2366 register enum reg_class
*p
;
2368 max_groups
[class]--;
2369 p
= reg_class_superclasses
[class];
2370 while (*p
!= LIM_REG_CLASSES
)
2372 if (group_size
[(int) *p
] <= group_size
[class])
2373 max_groups
[(int) *p
]--;
2377 /* Don't count these registers again. */
2378 for (j
= 0; j
< group_size
[class]; j
++)
2379 SET_HARD_REG_BIT (counted_for_groups
, i
+ j
);
2382 /* Skip to the last reg in this group. When i is incremented above,
2383 it will then point to the first reg of the next possible group. */
/* ALLOCATE_MODE is a register mode that needs to be reloaded.  OTHER_MODE is
   another mode that needs to be reloaded for the same register class CLASS.
   If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
   ALLOCATE_MODE will never be smaller than OTHER_MODE.

   This code used to also fail if any reg in CLASS allows OTHER_MODE but not
   ALLOCATE_MODE.  This test is unnecessary, because we will never try to put
   something of mode ALLOCATE_MODE into an OTHER_MODE register.  Testing this
   causes unnecessary failures on machines requiring alignment of register
   groups when the two modes are different sizes, because the larger mode has
   more strict alignment rules than the smaller mode.  */
static int
modes_equiv_for_class_p (allocate_mode, other_mode, class)
     enum machine_mode allocate_mode, other_mode;
     enum reg_class class;
{
  register int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
	  && HARD_REGNO_MODE_OK (regno, allocate_mode)
	  && ! HARD_REGNO_MODE_OK (regno, other_mode))
	return 0;
    }
  return 1;
}
/* Handle the failure to find a register to spill.
   INSN should be one of the insns which needed this particular spill reg.  */

static void
spill_failure (insn)
     rtx insn;
{
  if (asm_noperands (PATTERN (insn)) >= 0)
    error_for_asm (insn, "`asm' needs too many reloads");
  else
    fatal_insn ("Unable to find a register to spill.", insn);
}
/* Add a new register to the tables of available spill-registers
   (as well as spilling all pseudos allocated to the register).
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
   so that this register can count off against them.
   MAX_NONGROUPS is 0 if this register is part of a group.
   GLOBAL and DUMPFILE are the same as the args that `reload' got.  */
static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    {
      static char *reg_class_names[] = REG_CLASS_NAMES;
      fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
This may be due to a compiler bug or to impossible asm\n\
statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
    }
2462 /* Make reg REGNO an additional reload reg. */
2464 potential_reload_regs
[i
] = -1;
2465 spill_regs
[n_spills
] = regno
;
2466 spill_reg_order
[regno
] = n_spills
;
2468 fprintf (dumpfile
, "Spilling reg %d.\n", spill_regs
[n_spills
]);
2470 /* Clear off the needs we just satisfied. */
2473 p
= reg_class_superclasses
[class];
2474 while (*p
!= LIM_REG_CLASSES
)
2475 max_needs
[(int) *p
++]--;
2477 if (max_nongroups
&& max_nongroups
[class] > 0)
2479 SET_HARD_REG_BIT (counted_for_nongroups
, regno
);
2480 max_nongroups
[class]--;
2481 p
= reg_class_superclasses
[class];
2482 while (*p
!= LIM_REG_CLASSES
)
2483 max_nongroups
[(int) *p
++]--;
2486 /* Spill every pseudo reg that was allocated to this reg
2487 or to something that overlaps this reg. */
2489 val
= spill_hard_reg (spill_regs
[n_spills
], global
, dumpfile
, 0);
2491 /* If there are some registers still to eliminate and this register
2492 wasn't ever used before, additional stack space may have to be
2493 allocated to store this register. Thus, we may have changed the offset
2494 between the stack and frame pointers, so mark that something has changed.
2495 (If new pseudos were spilled, thus requiring more space, VAL would have
2496 been set non-zero by the call to spill_hard_reg above since additional
2497 reloads may be needed in that case.
2499 One might think that we need only set VAL to 1 if this is a call-used
2500 register. However, the set of registers that must be saved by the
2501 prologue is not identical to the call-used set. For example, the
2502 register used by the call insn for the return PC is a call-used register,
2503 but must be saved by the prologue. */
2504 if (num_eliminable
&& ! regs_ever_live
[spill_regs
[n_spills
]])
2507 regs_ever_live
[spill_regs
[n_spills
]] = 1;
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */
static void
delete_dead_insn (insn)
     rtx insn;
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    delete_dead_insn (prev);

  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}
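
/* Illustrative sketch, not part of the original sources: the slot-reuse
   test used by alter_reg below.  A previously allocated spill slot can be
   shared only if it is at least as large as the new pseudo in both
   respects: the slot's mode must cover the pseudo's inherent size and the
   slot's width must cover the total size (which allows for paradoxical
   subregs).  The function and parameter names are hypothetical.  */

static int
example_can_reuse_slot (slot_mode_size, slot_width, inherent_size, total_size)
     int slot_mode_size, slot_width, inherent_size, total_size;
{
  return slot_width >= total_size && slot_mode_size >= inherent_size;
}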
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */
2546 alter_reg (i
, from_reg
)
2550 /* When outputting an inline function, this can happen
2551 for a reg that isn't actually used. */
2552 if (regno_reg_rtx
[i
] == 0)
2555 /* If the reg got changed to a MEM at rtl-generation time,
2557 if (GET_CODE (regno_reg_rtx
[i
]) != REG
)
2560 /* Modify the reg-rtx to contain the new hard reg
2561 number or else to contain its pseudo reg number. */
2562 REGNO (regno_reg_rtx
[i
])
2563 = reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
;
2565 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2566 allocate a stack slot for it. */
2568 if (reg_renumber
[i
] < 0
2569 && REG_N_REFS (i
) > 0
2570 && reg_equiv_constant
[i
] == 0
2571 && reg_equiv_memory_loc
[i
] == 0)
2574 int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2575 int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
2588 /* No known place to spill from => no slot to reuse. */
2589 x
= assign_stack_local (GET_MODE (regno_reg_rtx
[i
]), total_size
,
2590 inherent_size
== total_size
? 0 : -1);
2591 if (BYTES_BIG_ENDIAN
)
2592 /* Cancel the big-endian correction done in assign_stack_local.
2593 Get the address of the beginning of the slot.
2594 This is so we can do a big-endian correction unconditionally
2596 adjust
= inherent_size
- total_size
;
2598 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2600 /* Reuse a stack slot if possible. */
2601 else if (spill_stack_slot
[from_reg
] != 0
2602 && spill_stack_slot_width
[from_reg
] >= total_size
2603 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2605 x
= spill_stack_slot
[from_reg
];
2606 /* Allocate a bigger slot. */
2609 /* Compute maximum size needed, both for inherent size
2610 and for total size. */
2611 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2613 if (spill_stack_slot
[from_reg
])
2615 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2617 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2618 if (spill_stack_slot_width
[from_reg
] > total_size
)
2619 total_size
= spill_stack_slot_width
[from_reg
];
2621 /* Make a slot with that size. */
2622 x
= assign_stack_local (mode
, total_size
,
2623 inherent_size
== total_size
? 0 : -1);
2625 if (BYTES_BIG_ENDIAN
)
2627 /* Cancel the big-endian correction done in assign_stack_local.
2628 Get the address of the beginning of the slot.
2629 This is so we can do a big-endian correction unconditionally
2631 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2633 stack_slot
= gen_rtx_MEM (mode_for_size (total_size
2636 plus_constant (XEXP (x
, 0), adjust
));
2638 spill_stack_slot
[from_reg
] = stack_slot
;
2639 spill_stack_slot_width
[from_reg
] = total_size
;
2642 /* On a big endian machine, the "address" of the slot
2643 is the address of the low part that fits its inherent mode. */
2644 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2645 adjust
+= (total_size
- inherent_size
);
2647 /* If we have any adjustment to make, or if the stack slot is the
2648 wrong mode, make a new stack slot. */
2649 if (adjust
!= 0 || GET_MODE (x
) != GET_MODE (regno_reg_rtx
[i
]))
2651 x
= gen_rtx_MEM (GET_MODE (regno_reg_rtx
[i
]),
2652 plus_constant (XEXP (x
, 0), adjust
));
2654 /* If this was shared among registers, must ensure we never
2655 set it readonly since that can cause scheduling
2656 problems. Note we would only have in this adjustment
2657 case in any event, since the code above doesn't set it. */
2660 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2663 /* Save the stack slot for later. */
2664 reg_equiv_memory_loc
[i
] = x
;
/* Mark the slots in regs_ever_live for the hard regs
   used by pseudo-reg number REGNO.  */

void
mark_home_live (regno)
     int regno;
{
  register int i, lim;

  i = reg_renumber[regno];
  if (i < 0)
    return;
  lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
  while (i < lim)
    regs_ever_live[i++] = 1;
}
/* Mark the registers used in SCRATCH as being live.  */

static void
mark_scratch_live (scratch)
     rtx scratch;
{
  register int i;
  int regno = REGNO (scratch);
  int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));

  for (i = regno; i < lim; i++)
    regs_ever_live[i] = 1;
}
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */
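
/* Illustrative sketch, not part of the original sources: the bookkeeping
   that set_label_offsets performs, shown for a single elimination and a
   single integer offset.  The first time a label is reached, the current
   offset is recorded; if a later path reaches the same label with a
   different offset, the elimination must be disabled because the offset
   would not be a compile-time constant there.  The arrays are hypothetical
   stand-ins for offsets_known_at / offsets_at and the can_eliminate flag.  */

static void
example_note_label_offset (label, cur_offset, known, recorded, can_eliminate)
     int label, cur_offset;
     char *known;
     int *recorded;
     int *can_eliminate;
{
  if (! known[label])
    {
      recorded[label] = cur_offset;
      known[label] = 1;
    }
  else if (recorded[label] != cur_offset)
    *can_eliminate = 0;
}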
2709 set_label_offsets (x
, insn
, initial_p
)
2714 enum rtx_code code
= GET_CODE (x
);
2717 struct elim_table
*p
;
2722 if (LABEL_REF_NONLOCAL_P (x
))
2727 /* ... fall through ... */
2730 /* If we know nothing about this label, set the desired offsets. Note
2731 that this sets the offset at a label to be the offset before a label
2732 if we don't know anything about the label. This is not correct for
2733 the label after a BARRIER, but is the best guess we can make. If
2734 we guessed wrong, we will suppress an elimination that might have
2735 been possible had we been able to guess correctly. */
2737 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
)])
2739 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2740 offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2741 = (initial_p
? reg_eliminate
[i
].initial_offset
2742 : reg_eliminate
[i
].offset
);
2743 offsets_known_at
[CODE_LABEL_NUMBER (x
)] = 1;
2746 /* Otherwise, if this is the definition of a label and it is
2747 preceded by a BARRIER, set our offsets to the known offset of
2751 && (tem
= prev_nonnote_insn (insn
)) != 0
2752 && GET_CODE (tem
) == BARRIER
)
2754 num_not_at_initial_offset
= 0;
2755 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2757 reg_eliminate
[i
].offset
= reg_eliminate
[i
].previous_offset
2758 = offsets_at
[CODE_LABEL_NUMBER (x
)][i
];
2759 if (reg_eliminate
[i
].can_eliminate
2760 && (reg_eliminate
[i
].offset
2761 != reg_eliminate
[i
].initial_offset
))
2762 num_not_at_initial_offset
++;
2767 /* If neither of the above cases is true, compare each offset
2768 with those previously recorded and suppress any eliminations
2769 where the offsets disagree. */
2771 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2772 if (offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2773 != (initial_p
? reg_eliminate
[i
].initial_offset
2774 : reg_eliminate
[i
].offset
))
2775 reg_eliminate
[i
].can_eliminate
= 0;
2780 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2782 /* ... fall through ... */
2786 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2787 and hence must have all eliminations at their initial offsets. */
2788 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2789 if (REG_NOTE_KIND (tem
) == REG_LABEL
)
2790 set_label_offsets (XEXP (tem
, 0), insn
, 1);
2795 /* Each of the labels in the address vector must be at their initial
2796 offsets. We want the first field for ADDR_VEC and the second
2797 field for ADDR_DIFF_VEC. */
2799 for (i
= 0; i
< XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2800 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2805 /* We only care about setting PC. If the source is not RETURN,
2806 IF_THEN_ELSE, or a label, disable any eliminations not at
2807 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2808 isn't one of those possibilities. For branches to a label,
2809 call ourselves recursively.
2811 Note that this can disable elimination unnecessarily when we have
2812 a non-local goto since it will look like a non-constant jump to
2813 someplace in the current function. This isn't a significant
2814 problem since such jumps will normally be when all elimination
2815 pairs are back to their initial offsets. */
2817 if (SET_DEST (x
) != pc_rtx
)
2820 switch (GET_CODE (SET_SRC (x
)))
2827 set_label_offsets (XEXP (SET_SRC (x
), 0), insn
, initial_p
);
2831 tem
= XEXP (SET_SRC (x
), 1);
2832 if (GET_CODE (tem
) == LABEL_REF
)
2833 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2834 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2837 tem
= XEXP (SET_SRC (x
), 2);
2838 if (GET_CODE (tem
) == LABEL_REF
)
2839 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2840 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2848 /* If we reach here, all eliminations must be at their initial
2849 offset because we are doing a jump to a variable address. */
2850 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2851 if (p
->offset
!= p
->initial_offset
)
2852 p
->can_eliminate
= 0;
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS.  */

static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
/* Scan X and replace any eliminable registers (such as fp) with a
   replacement (such as sp), plus an offset.

   MEM_MODE is the mode of an enclosing MEM.  We need this to know how
   much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
   MEM, we are allowed to replace a sum of a register and the constant zero
   with the register, which we cannot do outside a MEM.  In addition, we need
   to record the fact that a register is referenced outside a MEM.

   If INSN is an insn, it is the insn containing X.  If we replace a REG
   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
   the REG is being modified.

   Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
   That's used when we eliminate in expressions stored in notes.
   This means, do not set ref_outside_mem even if the reference
   is outside of MEMs.

   If we see a modification to a register we know about, take the
   appropriate action (see case SET, below).

   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
   replacements done assuming all offsets are at their initial values.  If
   they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
   encounter, return the actual location so that find_reloads will do
   the proper thing.  */
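
/* Illustrative sketch, not part of the original sources: the basic
   rewrite performed by eliminate_regs, shown on a toy address instead of
   rtl.  A reference to the FROM register (say the frame pointer) at
   displacement DISP becomes a reference to the TO register (say the stack
   pointer) at displacement DISP + OFFSET, where OFFSET is the current
   offset of the elimination pair.  The struct and names are hypothetical.  */

struct example_address { int base_reg; int disp; };

static struct example_address
example_eliminate_address (addr, from_reg, to_reg, offset)
     struct example_address addr;
     int from_reg, to_reg, offset;
{
  if (addr.base_reg == from_reg)
    {
      addr.base_reg = to_reg;
      addr.disp += offset;
    }
  return addr;
}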
2894 eliminate_regs (x
, mem_mode
, insn
)
2896 enum machine_mode mem_mode
;
2899 enum rtx_code code
= GET_CODE (x
);
2900 struct elim_table
*ep
;
2923 /* This is only for the benefit of the debugging backends, which call
2924 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2925 removed after CSE. */
2926 new = eliminate_regs (XEXP (x
, 0), 0, insn
);
2927 if (GET_CODE (new) == MEM
)
2928 return XEXP (new, 0);
2934 /* First handle the case where we encounter a bare register that
2935 is eliminable. Replace it with a PLUS. */
2936 if (regno
< FIRST_PSEUDO_REGISTER
)
2938 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2940 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2943 /* Refs inside notes don't count for this purpose. */
2944 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2945 || GET_CODE (insn
) == INSN_LIST
)))
2946 ep
->ref_outside_mem
= 1;
2947 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2951 else if (reg_equiv_memory_loc
&& reg_equiv_memory_loc
[regno
]
2952 && (reg_equiv_address
[regno
] || num_not_at_initial_offset
))
	  /* In this case, find_reloads would attempt to either use an
	     incorrect address (if something is not at its initial offset)
	     or substitute a replaced address into an insn (which loses
	     if the offset is changed by some later action).  So we simply
	     return the replaced stack slot (assuming it is changed by
	     elimination) and ignore the fact that this is actually a
	     reference to the pseudo.  Ensure we make a copy of the
	     address in case it is shared.  */
2962 new = eliminate_regs (reg_equiv_memory_loc
[regno
], mem_mode
, insn
);
2963 if (new != reg_equiv_memory_loc
[regno
])
2965 if (insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
2966 && GET_CODE (insn
) != INSN_LIST
)
2967 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode
, x
), insn
))
2968 = gen_rtx_EXPR_LIST (REG_EQUAL
, new, NULL_RTX
);
2969 return copy_rtx (new);
2975 /* If this is the sum of an eliminable register and a constant, rework
2977 if (GET_CODE (XEXP (x
, 0)) == REG
2978 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2979 && CONSTANT_P (XEXP (x
, 1)))
2981 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2983 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2986 /* Refs inside notes don't count for this purpose. */
2987 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2988 || GET_CODE (insn
) == INSN_LIST
)))
2989 ep
->ref_outside_mem
= 1;
2991 /* The only time we want to replace a PLUS with a REG (this
2992 occurs when the constant operand of the PLUS is the negative
2993 of the offset) is when we are inside a MEM. We won't want
2994 to do so at other times because that would change the
2995 structure of the insn in a way that reload can't handle.
2996 We special-case the commonest situation in
2997 eliminate_regs_in_insn, so just replace a PLUS with a
2998 PLUS here, unless inside a MEM. */
2999 if (mem_mode
!= 0 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3000 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
3003 return gen_rtx_PLUS (Pmode
, ep
->to_rtx
,
3004 plus_constant (XEXP (x
, 1),
3005 ep
->previous_offset
));
3008 /* If the register is not eliminable, we are done since the other
3009 operand is a constant. */
      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 We assume here this is part of an address (or a "load address" insn)
	 since an eliminable register is not likely to appear in any other
	 context.

	 If we have (plus (eliminable) (reg)), we want to produce
	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
	 normal add insn, (plus (replacement) (reg)) will be pushed as a
	 reload.  This is the desired action.  */
3027 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3028 rtx new1
= eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
3030 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
3032 /* If one side is a PLUS and the other side is a pseudo that
3033 didn't get a hard register but has a reg_equiv_constant,
3034 we must replace the constant here since it may no longer
3035 be in the position of any operand. */
3036 if (GET_CODE (new0
) == PLUS
&& GET_CODE (new1
) == REG
3037 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
3038 && reg_renumber
[REGNO (new1
)] < 0
3039 && reg_equiv_constant
!= 0
3040 && reg_equiv_constant
[REGNO (new1
)] != 0)
3041 new1
= reg_equiv_constant
[REGNO (new1
)];
3042 else if (GET_CODE (new1
) == PLUS
&& GET_CODE (new0
) == REG
3043 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
3044 && reg_renumber
[REGNO (new0
)] < 0
3045 && reg_equiv_constant
[REGNO (new0
)] != 0)
3046 new0
= reg_equiv_constant
[REGNO (new0
)];
3048 new = form_sum (new0
, new1
);
3050 /* As above, if we are not inside a MEM we do not want to
3051 turn a PLUS into something else. We might try to do so here
3052 for an addition of 0 if we aren't optimizing. */
3053 if (! mem_mode
&& GET_CODE (new) != PLUS
)
3054 return gen_rtx_PLUS (GET_MODE (x
), new, const0_rtx
);
3062 /* If this is the product of an eliminable register and a
3063 constant, apply the distribute law and move the constant out
3064 so that we have (plus (mult ..) ..). This is needed in order
3065 to keep load-address insns valid. This case is pathological.
3066 We ignore the possibility of overflow here. */
3067 if (GET_CODE (XEXP (x
, 0)) == REG
3068 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
3069 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3070 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3072 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
3075 /* Refs inside notes don't count for this purpose. */
3076 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
3077 || GET_CODE (insn
) == INSN_LIST
)))
3078 ep
->ref_outside_mem
= 1;
3081 plus_constant (gen_rtx_MULT (Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
3082 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
3085 /* ... fall through ... */
3090 case DIV
: case UDIV
:
3091 case MOD
: case UMOD
:
3092 case AND
: case IOR
: case XOR
:
3093 case ROTATERT
: case ROTATE
:
3094 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3096 case GE
: case GT
: case GEU
: case GTU
:
3097 case LE
: case LT
: case LEU
: case LTU
:
3099 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3101 = XEXP (x
, 1) ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
) : 0;
3103 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
3104 return gen_rtx_fmt_ee (code
, GET_MODE (x
), new0
, new1
);
3109 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3112 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3113 if (new != XEXP (x
, 0))
3114 x
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (x
), new, XEXP (x
, 1));
3117 /* ... fall through ... */
3120 /* Now do eliminations in the rest of the chain. If this was
3121 an EXPR_LIST, this might result in allocating more memory than is
3122 strictly needed, but it simplifies the code. */
3125 new = eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
3126 if (new != XEXP (x
, 1))
3127 return gen_rtx_fmt_ee (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new);
3135 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3136 if (ep
->to_rtx
== XEXP (x
, 0))
3138 int size
= GET_MODE_SIZE (mem_mode
);
3140 /* If more bytes than MEM_MODE are pushed, account for them. */
3141 #ifdef PUSH_ROUNDING
3142 if (ep
->to_rtx
== stack_pointer_rtx
)
3143 size
= PUSH_ROUNDING (size
);
3145 if (code
== PRE_DEC
|| code
== POST_DEC
)
3151 /* Fall through to generic unary operation case. */
3152 case STRICT_LOW_PART
:
3154 case SIGN_EXTEND
: case ZERO_EXTEND
:
3155 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3156 case FLOAT
: case FIX
:
3157 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3161 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3162 if (new != XEXP (x
, 0))
3163 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
3167 /* Similar to above processing, but preserve SUBREG_WORD.
3168 Convert (subreg (mem)) to (mem) if not paradoxical.
3169 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3170 pseudo didn't get a hard reg, we must replace this with the
3171 eliminated version of the memory location because push_reloads
3172 may do the replacement in certain circumstances. */
3173 if (GET_CODE (SUBREG_REG (x
)) == REG
3174 && (GET_MODE_SIZE (GET_MODE (x
))
3175 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3176 && reg_equiv_memory_loc
!= 0
3177 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
3179 new = eliminate_regs (reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))],
3182 /* If we didn't change anything, we must retain the pseudo. */
3183 if (new == reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))])
3184 new = SUBREG_REG (x
);
3187 /* In this case, we must show that the pseudo is used in this
3188 insn so that delete_output_reload will do the right thing. */
3189 if (insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
3190 && GET_CODE (insn
) != INSN_LIST
)
3191 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode
,
3194 = gen_rtx_EXPR_LIST (REG_EQUAL
, new, NULL_RTX
);
3196 /* Ensure NEW isn't shared in case we have to reload it. */
3197 new = copy_rtx (new);
3201 new = eliminate_regs (SUBREG_REG (x
), mem_mode
, insn
);
3203 if (new != XEXP (x
, 0))
3205 int x_size
= GET_MODE_SIZE (GET_MODE (x
));
3206 int new_size
= GET_MODE_SIZE (GET_MODE (new));
3208 if (GET_CODE (new) == MEM
3209 && ((x_size
< new_size
3210 #ifdef WORD_REGISTER_OPERATIONS
3211 /* On these machines, combine can create rtl of the form
3212 (set (subreg:m1 (reg:m2 R) 0) ...)
3213 where m1 < m2, and expects something interesting to
3214 happen to the entire word. Moreover, it will use the
3215 (reg:m2 R) later, expecting all bits to be preserved.
3216 So if the number of words is the same, preserve the
3217 subreg so that push_reloads can see it. */
3218 && ! ((x_size
-1)/UNITS_PER_WORD
== (new_size
-1)/UNITS_PER_WORD
)
3221 || (x_size
== new_size
))
3224 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
3225 enum machine_mode mode
= GET_MODE (x
);
3227 if (BYTES_BIG_ENDIAN
)
3228 offset
+= (MIN (UNITS_PER_WORD
,
3229 GET_MODE_SIZE (GET_MODE (new)))
3230 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
3232 PUT_MODE (new, mode
);
3233 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset
);
3237 return gen_rtx_SUBREG (GET_MODE (x
), new, SUBREG_WORD (x
));
3243 /* If using a register that is the source of an eliminate we still
3244 think can be performed, note it cannot be performed since we don't
3245 know how this register is used. */
3246 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3247 if (ep
->from_rtx
== XEXP (x
, 0))
3248 ep
->can_eliminate
= 0;
3250 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3251 if (new != XEXP (x
, 0))
3252 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
3256 /* If clobbering a register that is the replacement register for an
3257 elimination we still think can be performed, note that it cannot
3258 be performed. Otherwise, we need not be concerned about it. */
3259 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3260 if (ep
->to_rtx
== XEXP (x
, 0))
3261 ep
->can_eliminate
= 0;
3263 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
3264 if (new != XEXP (x
, 0))
3265 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
3271 /* Properly handle sharing input and constraint vectors. */
3272 if (ASM_OPERANDS_INPUT_VEC (x
) != old_asm_operands_vec
)
3274 /* When we come to a new vector not seen before,
3275 scan all its elements; keep the old vector if none
3276 of them changes; otherwise, make a copy. */
3277 old_asm_operands_vec
= ASM_OPERANDS_INPUT_VEC (x
);
3278 temp_vec
= (rtx
*) alloca (XVECLEN (x
, 3) * sizeof (rtx
));
3279 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
3280 temp_vec
[i
] = eliminate_regs (ASM_OPERANDS_INPUT (x
, i
),
3283 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
3284 if (temp_vec
[i
] != ASM_OPERANDS_INPUT (x
, i
))
3287 if (i
== ASM_OPERANDS_INPUT_LENGTH (x
))
3288 new_asm_operands_vec
= old_asm_operands_vec
;
3290 new_asm_operands_vec
3291 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x
), temp_vec
);
3294 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3295 if (new_asm_operands_vec
== old_asm_operands_vec
)
3298 new = gen_rtx_ASM_OPERANDS (VOIDmode
, ASM_OPERANDS_TEMPLATE (x
),
3299 ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
3300 ASM_OPERANDS_OUTPUT_IDX (x
),
3301 new_asm_operands_vec
,
3302 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x
),
3303 ASM_OPERANDS_SOURCE_FILE (x
),
3304 ASM_OPERANDS_SOURCE_LINE (x
));
3305 new->volatil
= x
->volatil
;
3310 /* Check for setting a register that we know about. */
3311 if (GET_CODE (SET_DEST (x
)) == REG
)
3313 /* See if this is setting the replacement register for an
3316 If DEST is the hard frame pointer, we do nothing because we
3317 assume that all assignments to the frame pointer are for
3318 non-local gotos and are being done at a time when they are valid
3319 and do not disturb anything else. Some machines want to
3320 eliminate a fake argument pointer (or even a fake frame pointer)
3321 with either the real frame or the stack pointer. Assignments to
3322 the hard frame pointer must not prevent this elimination. */
3324 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3326 if (ep
->to_rtx
== SET_DEST (x
)
3327 && SET_DEST (x
) != hard_frame_pointer_rtx
)
3329 /* If it is being incremented, adjust the offset. Otherwise,
3330 this elimination can't be done. */
3331 rtx src
= SET_SRC (x
);
3333 if (GET_CODE (src
) == PLUS
3334 && XEXP (src
, 0) == SET_DEST (x
)
3335 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
3336 ep
->offset
-= INTVAL (XEXP (src
, 1));
3338 ep
->can_eliminate
= 0;
3341 /* Now check to see we are assigning to a register that can be
3342 eliminated. If so, it must be as part of a PARALLEL, since we
3343 will not have been called if this is a single SET. So indicate
3344 that we can no longer eliminate this reg. */
3345 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3347 if (ep
->from_rtx
== SET_DEST (x
) && ep
->can_eliminate
)
3348 ep
->can_eliminate
= 0;
3351 /* Now avoid the loop below in this common case. */
3353 rtx new0
= eliminate_regs (SET_DEST (x
), 0, insn
);
3354 rtx new1
= eliminate_regs (SET_SRC (x
), 0, insn
);
3356 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3357 write a CLOBBER insn. */
3358 if (GET_CODE (SET_DEST (x
)) == REG
&& GET_CODE (new0
) == MEM
3359 && insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
3360 && GET_CODE (insn
) != INSN_LIST
)
3361 emit_insn_after (gen_rtx_CLOBBER (VOIDmode
, SET_DEST (x
)), insn
);
3363 if (new0
!= SET_DEST (x
) || new1
!= SET_SRC (x
))
3364 return gen_rtx_SET (VOIDmode
, new0
, new1
);
3370 /* This is only for the benefit of the debugging backends, which call
3371 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3372 removed after CSE. */
3373 if (GET_CODE (XEXP (x
, 0)) == ADDRESSOF
)
3374 return eliminate_regs (XEXP (XEXP (x
, 0), 0), 0, insn
);
3376 /* Our only special processing is to pass the mode of the MEM to our
3377 recursive call and copy the flags. While we are here, handle this
3378 case more efficiently. */
3379 new = eliminate_regs (XEXP (x
, 0), GET_MODE (x
), insn
);
3380 if (new != XEXP (x
, 0))
3382 new = gen_rtx_MEM (GET_MODE (x
), new);
3383 new->volatil
= x
->volatil
;
3384 new->unchanging
= x
->unchanging
;
3385 new->in_struct
= x
->in_struct
;
3395 /* Process each of our operands recursively. If any have changed, make a
3397 fmt
= GET_RTX_FORMAT (code
);
3398 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3402 new = eliminate_regs (XEXP (x
, i
), mem_mode
, insn
);
3403 if (new != XEXP (x
, i
) && ! copied
)
3405 rtx new_x
= rtx_alloc (code
);
3406 bcopy ((char *) x
, (char *) new_x
,
3407 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3408 + sizeof (new_x
->fld
[0]) * GET_RTX_LENGTH (code
)));
3414 else if (*fmt
== 'E')
3417 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3419 new = eliminate_regs (XVECEXP (x
, i
, j
), mem_mode
, insn
);
3420 if (new != XVECEXP (x
, i
, j
) && ! copied_vec
)
3422 rtvec new_v
= gen_rtvec_vv (XVECLEN (x
, i
),
3426 rtx new_x
= rtx_alloc (code
);
3427 bcopy ((char *) x
, (char *) new_x
,
3428 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3429 + (sizeof (new_x
->fld
[0])
3430 * GET_RTX_LENGTH (code
))));
3434 XVEC (x
, i
) = new_v
;
3437 XVECEXP (x
, i
, j
) = new;
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */
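
/* Illustrative sketch, not part of the original sources: the special case
   handled early in eliminate_regs_in_insn.  An insn of the form
   (set (reg R) (plus (reg FROM) (const_int C))) can be turned into a
   plain copy from the replacement register when C is exactly the negative
   of the current elimination offset, since FROM + C then equals TO.  The
   function name is hypothetical.  */

static int
example_becomes_plain_copy (c, offset)
     int c, offset;
{
  return c == -offset;
}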
3459 eliminate_regs_in_insn (insn
, replace
)
3463 rtx old_body
= PATTERN (insn
);
3464 rtx old_set
= single_set (insn
);
3467 struct elim_table
*ep
;
3470 push_obstacks (&reload_obstack
, &reload_obstack
);
3472 if (old_set
!= 0 && GET_CODE (SET_DEST (old_set
)) == REG
3473 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3475 /* Check for setting an eliminable register. */
3476 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3477 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3479 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3480 /* If this is setting the frame pointer register to the
3481 hardware frame pointer register and this is an elimination
3482 that will be done (tested above), this insn is really
3483 adjusting the frame pointer downward to compensate for
3484 the adjustment done before a nonlocal goto. */
3485 if (ep
->from
== FRAME_POINTER_REGNUM
3486 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3488 rtx src
= SET_SRC (old_set
);
3490 rtx prev_insn
, prev_set
;
3492 if (src
== ep
->to_rtx
)
3494 else if (GET_CODE (src
) == PLUS
3495 && GET_CODE (XEXP (src
, 0)) == CONST_INT
3496 && XEXP (src
, 1) == ep
->to_rtx
)
3497 offset
= INTVAL (XEXP (src
, 0)), ok
= 1;
3498 else if (GET_CODE (src
) == PLUS
3499 && GET_CODE (XEXP (src
, 1)) == CONST_INT
3500 && XEXP (src
, 0) == ep
->to_rtx
)
3501 offset
= INTVAL (XEXP (src
, 1)), ok
= 1;
3502 else if ((prev_insn
= prev_nonnote_insn (insn
)) != 0
3503 && (prev_set
= single_set (prev_insn
)) != 0
3504 && rtx_equal_p (SET_DEST (prev_set
), src
))
3506 src
= SET_SRC (prev_set
);
3507 if (src
== ep
->to_rtx
)
3509 else if (GET_CODE (src
) == PLUS
3510 && GET_CODE (XEXP (src
, 0)) == CONST_INT
3511 && XEXP (src
, 1) == ep
->to_rtx
)
3512 offset
= INTVAL (XEXP (src
, 0)), ok
= 1;
3513 else if (GET_CODE (src
) == PLUS
3514 && GET_CODE (XEXP (src
, 1)) == CONST_INT
3515 && XEXP (src
, 0) == ep
->to_rtx
)
3516 offset
= INTVAL (XEXP (src
, 1)), ok
= 1;
3524 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
3526 /* First see if this insn remains valid when we
3527 make the change. If not, keep the INSN_CODE
3528 the same and let reload fit it up. */
3529 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3530 validate_change (insn
, &SET_DEST (old_set
),
3532 if (! apply_change_group ())
3534 SET_SRC (old_set
) = src
;
3535 SET_DEST (old_set
) = ep
->to_rtx
;
3545 /* In this case this insn isn't serving a useful purpose. We
3546 will delete it in reload_as_needed once we know that this
3547 elimination is, in fact, being done.
3549 If REPLACE isn't set, we can't delete this insn, but needn't
3550 process it since it won't be used unless something changes. */
3552 delete_dead_insn (insn
);
3557 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3558 in the insn is the negative of the offset in FROM. Substitute
3559 (set (reg) (reg to)) for the insn and change its code.
3561 We have to do this here, rather than in eliminate_regs, do that we can
3562 change the insn code. */
3564 if (GET_CODE (SET_SRC (old_set
)) == PLUS
3565 && GET_CODE (XEXP (SET_SRC (old_set
), 0)) == REG
3566 && GET_CODE (XEXP (SET_SRC (old_set
), 1)) == CONST_INT
)
3567 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3569 if (ep
->from_rtx
== XEXP (SET_SRC (old_set
), 0)
3570 && ep
->can_eliminate
)
3572 /* We must stop at the first elimination that will be used.
3573 If this one would replace the PLUS with a REG, do it
3574 now. Otherwise, quit the loop and let eliminate_regs
3575 do its normal replacement. */
3576 if (ep
->offset
== - INTVAL (XEXP (SET_SRC (old_set
), 1)))
3578 /* We assume here that we don't need a PARALLEL of
3579 any CLOBBERs for this assignment. There's not
3580 much we can do if we do need it. */
3581 PATTERN (insn
) = gen_rtx_SET (VOIDmode
,
3584 INSN_CODE (insn
) = -1;
3593 old_asm_operands_vec
= 0;
3595 /* Replace the body of this insn with a substituted form. If we changed
3596 something, return non-zero.
3598 If we are replacing a body that was a (set X (plus Y Z)), try to
3599 re-recognize the insn. We do this in case we had a simple addition
3600 but now can do this as a load-address. This saves an insn in this
3603 new_body
= eliminate_regs (old_body
, 0, replace
? insn
: NULL_RTX
);
3604 if (new_body
!= old_body
)
3606 /* If we aren't replacing things permanently and we changed something,
3607 make another copy to ensure that all the RTL is new. Otherwise
3608 things can go wrong if find_reload swaps commutative operands
3609 and one is inside RTL that has been copied while the other is not. */
3611 /* Don't copy an asm_operands because (1) there's no need and (2)
3612 copy_rtx can't do it properly when there are multiple outputs. */
3613 if (! replace
&& asm_noperands (old_body
) < 0)
3614 new_body
= copy_rtx (new_body
);
3616 /* If we had a move insn but now we don't, rerecognize it. This will
3617 cause spurious re-recognition if the old move had a PARALLEL since
3618 the new one still will, but we can't call single_set without
3619 having put NEW_BODY into the insn and the re-recognition won't
3620 hurt in this rare case. */
3622 && ((GET_CODE (SET_SRC (old_set
)) == REG
3623 && (GET_CODE (new_body
) != SET
3624 || GET_CODE (SET_SRC (new_body
)) != REG
))
3625 /* If this was a load from or store to memory, compare
3626 the MEM in recog_operand to the one in the insn. If they
3627 are not equal, then rerecognize the insn. */
3629 && ((GET_CODE (SET_SRC (old_set
)) == MEM
3630 && SET_SRC (old_set
) != recog_operand
[1])
3631 || (GET_CODE (SET_DEST (old_set
)) == MEM
3632 && SET_DEST (old_set
) != recog_operand
[0])))
3633 /* If this was an add insn before, rerecognize. */
3634 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3636 if (! validate_change (insn
, &PATTERN (insn
), new_body
, 0))
3637 /* If recognition fails, store the new body anyway.
3638 It's normal to have recognition failures here
3639 due to bizarre memory addresses; reloading will fix them. */
3640 PATTERN (insn
) = new_body
;
3643 PATTERN (insn
) = new_body
;
3648 /* Loop through all elimination pairs. See if any have changed and
3649 recalculate the number not at initial offset.
3651 Compute the maximum offset (minimum offset if the stack does not
3652 grow downward) for each elimination pair.
3654 We also detect a cases where register elimination cannot be done,
3655 namely, if a register would be both changed and referenced outside a MEM
3656 in the resulting insn since such an insn is often undefined and, even if
3657 not, we cannot know what meaning will be given to it. Note that it is
3658 valid to have a register used in an address in an insn that changes it
3659 (presumably with a pre- or post-increment or decrement).
3661 If anything changes, return nonzero. */
3663 num_not_at_initial_offset
= 0;
3664 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3666 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3667 ep
->can_eliminate
= 0;
3669 ep
->ref_outside_mem
= 0;
3671 if (ep
->previous_offset
!= ep
->offset
)
3674 ep
->previous_offset
= ep
->offset
;
3675 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3676 num_not_at_initial_offset
++;
3678 #ifdef STACK_GROWS_DOWNWARD
3679 ep
->max_offset
= MAX (ep
->max_offset
, ep
->offset
);
3681 ep
->max_offset
= MIN (ep
->max_offset
, ep
->offset
);
3686 /* If we changed something, perform elimination in REG_NOTES. This is
3687 needed even when REPLACE is zero because a REG_DEAD note might refer
3688 to a register that we eliminate and could cause a different number
3689 of spill registers to be needed in the final reload pass than in
3691 if (val
&& REG_NOTES (insn
) != 0)
3692 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
   replacement we currently believe is valid, mark it as not eliminable if X
   modifies DEST in any way other than by adding a constant integer to it.

   If DEST is the frame pointer, we do nothing because we assume that
   all assignments to the hard frame pointer are nonlocal gotos and are being
   done at a time when they are valid and do not disturb anything else.
   Some machines want to eliminate a fake argument pointer with either the
   frame or stack pointer.  Assignments to the hard frame pointer must not
   prevent this elimination.

   Called via note_stores from reload before starting its passes to scan
   the insns of the function.  */

static void
mark_not_eliminable (dest, x)
     rtx dest;
     rtx x;
{
  register int i;

  /* A SUBREG of a hard register here is just changing its mode.  We should
     not see a SUBREG of an eliminable hard register, but check just in
     case.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (dest == hard_frame_pointer_rtx)
    return;

  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
        && (GET_CODE (x) != SET
            || GET_CODE (SET_SRC (x)) != PLUS
            || XEXP (SET_SRC (x), 0) != dest
            || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
      reg_eliminate[i].can_eliminate_previous
        = reg_eliminate[i].can_eliminate = 0;
}
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In this case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
        && reg_renumber[i] <= regno
        && (reg_renumber[i]
            + HARD_REGNO_NREGS (reg_renumber[i],
                                PSEUDO_REGNO_MODE (i))
            > regno))
      {
        /* If this register belongs solely to a basic block which needed no
           spilling of any class that this register is contained in,
           leave it be, unless we are spilling this register because
           it was a hard register that can't be eliminated.  */

        if (! cant_eliminate
            && basic_block_needs[0]
            && REG_BASIC_BLOCK (i) >= 0
            && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
          {
            enum reg_class *p;

            for (p = reg_class_superclasses[(int) class];
                 *p != LIM_REG_CLASSES; p++)
              if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
                break;

            if (*p == LIM_REG_CLASSES)
              continue;
          }

        /* Mark it as no longer having a hard register home.  */
        reg_renumber[i] = -1;
        /* We will need to scan everything again.  */
        something_changed = 1;
        if (global)
          retry_global_alloc (i, forbidden_regs);

        alter_reg (i, regno);

        if (dumpfile)
          {
            if (reg_renumber[i] == -1)
              fprintf (dumpfile, " Register %d now on stack.\n\n", i);
            else
              fprintf (dumpfile, " Register %d now in %d.\n\n",
                       i, reg_renumber[i]);
          }
      }

  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i]
          && regno >= REGNO (scratch_list[i])
          && regno < (REGNO (scratch_list[i])
                      + HARD_REGNO_NREGS (REGNO (scratch_list[i]),
                                          GET_MODE (scratch_list[i]))))
        {
          if (! cant_eliminate && basic_block_needs[0]
              && ! basic_block_needs[(int) class][scratch_block[i]])
            {
              enum reg_class *p;

              for (p = reg_class_superclasses[(int) class];
                   *p != LIM_REG_CLASSES; p++)
                if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
                  break;

              if (*p == LIM_REG_CLASSES)
                continue;
            }

          PUT_CODE (scratch_list[i], SCRATCH);
          scratch_list[i] = 0;
          something_changed = 1;
        }
    }

  return something_changed;
}
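/* The loop above decides whether a pseudo's home overlaps hard register
   REGNO by checking home <= regno < home + nregs.  Purely as an
   illustration (the helper below is hypothetical and not used anywhere),
   the same test in isolation looks like this:  */
#if 0
static int
pseudo_overlaps_hard_reg_p (int home, int nregs, int regno)
{
  /* HOME is the first hard register allocated to the pseudo, or -1 if it
     lives on the stack; NREGS is how many consecutive hard registers the
     pseudo's mode needs.  */
  return home >= 0 && home <= regno && regno < home + nregs;
}
#endif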
/* Find all paradoxical subregs within X and update reg_max_ref_width.
   Also mark any hard registers used to store user variables as
   forbidden from being used for spill registers.  */

static void
scan_paradoxical_subregs (x)
     register rtx x;
{
  register int i;
  register char *fmt;
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
      if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
          && REG_USERVAR_P (x))
        SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG
          && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
        reg_max_ref_width[REGNO (SUBREG_REG (x))]
          = GET_MODE_SIZE (GET_MODE (x));
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        scan_paradoxical_subregs (XEXP (x, i));
      else if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            scan_paradoxical_subregs (XVECEXP (x, i, j));
        }
    }
}
static int
hard_reg_use_compare (p1p, p2p)
     const GENERIC_PTR p1p;
     const GENERIC_PTR p2p;
{
  struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *) p1p,
                         *p2 = (struct hard_reg_n_uses *) p2p;
  int tem = p1->uses - p2->uses;
  if (tem != 0)
    return tem;
  /* If regs are equally good, sort by regno,
     so that the results of qsort leave nothing to chance.  */
  return p1->regno - p2->regno;
}
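/* Illustration of the tie-breaking idiom above: because qsort makes no
   guarantee about the relative order of equal elements, a comparator that
   falls back to a unique key makes the final order deterministic.  The
   struct and function below are hypothetical and exist only as a sketch.  */
#if 0
struct weighted_item { int weight; int id; };

static int
weighted_item_compare (const GENERIC_PTR ap, const GENERIC_PTR bp)
{
  const struct weighted_item *a = (const struct weighted_item *) ap;
  const struct weighted_item *b = (const struct weighted_item *) bp;

  if (a->weight != b->weight)
    return a->weight - b->weight;
  /* Equal weights: fall back to the unique id, the way
     hard_reg_use_compare falls back to the register number.  */
  return a->id - b->id;
}
#endif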
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];

      if (regno >= 0)
        {
          int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
          while (regno < lim)
            hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
        }
      large += REG_N_REFS (i);
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
        {
          hard_reg_n_uses[i].uses += 2 * large + 2;
          SET_HARD_REG_BIT (bad_spill_regs, i);
        }
      else if (regs_explicitly_used[i])
        {
          hard_reg_n_uses[i].uses += large + 1;
          if (! SMALL_REGISTER_CLASSES)
            /* ??? We are doing this here because of the potential
               that bad code may be generated if a register explicitly
               used in an insn was used as a spill register for that
               insn.  But not using these as spill registers may lose
               on some machines.  We'll have to see how this works out.  */
            SET_HARD_REG_BIT (bad_spill_regs, i);
        }
    }

  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
        potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
        potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
        potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
         sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
/* Used in reload_as_needed to sort the spilled regs.  */

static int
compare_spill_regs (r1p, r2p)
     const GENERIC_PTR r1p;
     const GENERIC_PTR r2p;
{
  short r1 = *(short *) r1p, r2 = *(short *) r2p;

  return r1 - r2;
}
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;

  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
                                  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
        = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */

  qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
  for (i = 0; i < n_spills; i++)
    spill_reg_order[spill_regs[i]] = i;

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
          && insn == basic_block_head[this_block + 1])
        ++this_block;

      /* If we pass a label, copy the offsets from the label information
         into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
        {
          num_not_at_initial_offset = 0;
          for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
            {
              reg_eliminate[i].offset = reg_eliminate[i].previous_offset
                = offsets_at[CODE_LABEL_NUMBER (insn)][i];
              if (reg_eliminate[i].can_eliminate
                  && (reg_eliminate[i].offset
                      != reg_eliminate[i].initial_offset))
                num_not_at_initial_offset++;
            }
        }

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          rtx avoid_return_reg = 0;
          rtx oldpat = PATTERN (insn);

          /* Set avoid_return_reg if this is an insn
             that might use the value of a function call.  */
          if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
            {
              if (GET_CODE (PATTERN (insn)) == SET)
                after_call = SET_DEST (PATTERN (insn));
              else if (GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                after_call = 0;
            }
          else if (SMALL_REGISTER_CLASSES && after_call != 0
                   && !(GET_CODE (PATTERN (insn)) == SET
                        && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
                   && GET_CODE (PATTERN (insn)) != USE)
            {
              if (reg_referenced_p (after_call, PATTERN (insn)))
                avoid_return_reg = after_call;
              after_call = 0;
            }

          /* If this is a USE or CLOBBER of a MEM, ensure that any
             references to eliminable registers have been removed.  */

          if ((GET_CODE (PATTERN (insn)) == USE
               || GET_CODE (PATTERN (insn)) == CLOBBER)
              && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
            XEXP (XEXP (PATTERN (insn), 0), 0)
              = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
                                GET_MODE (XEXP (PATTERN (insn), 0)),
                                NULL_RTX);

          /* If we need to do register elimination processing, do so.
             This might delete the insn, in which case we are done.  */
          if (num_eliminable && GET_MODE (insn) == QImode)
            {
              eliminate_regs_in_insn (insn, 1);
              if (GET_CODE (insn) == NOTE)
                {
                  insn = next;
                  continue;
                }
            }

          if (GET_MODE (insn) == VOIDmode)
            n_reloads = 0;
          /* First find the pseudo regs that must be reloaded for this insn.
             This info is returned in the tables reload_... (see reload.h).
             Also modify the body of INSN by substituting RELOAD
             rtx's for those pseudo regs.  */
          else
            {
              bzero (reg_has_output_reload, max_regno);
              CLEAR_HARD_REG_SET (reg_is_output_reload);

              find_reloads (insn, 1, spill_indirect_levels, live_known,
                            spill_reg_order);
            }

          if (n_reloads > 0)
            {
              rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
              rtx p;
              int class;

              /* If this block has not had spilling done for a
                 particular class and we have any non-optionals that need a
                 spill reg in that class, abort.  */

              for (class = 0; class < N_REG_CLASSES; class++)
                if (basic_block_needs[class] != 0
                    && basic_block_needs[class][this_block] == 0)
                  for (i = 0; i < n_reloads; i++)
                    if (class == (int) reload_reg_class[i]
                        && reload_reg_rtx[i] == 0
                        && ! reload_optional[i]
                        && (reload_in[i] != 0 || reload_out[i] != 0
                            || reload_secondary_p[i] != 0))
                      fatal_insn ("Non-optional registers need a spill register", insn);

              /* Now compute which reload regs to reload them into.  Perhaps
                 reusing reload regs from previous insns, or else output
                 load insns to reload them.  Maybe output store insns too.
                 Record the choices of reload reg in reload_reg_rtx.  */
              choose_reload_regs (insn, avoid_return_reg);

              /* Merge any reloads that we didn't combine for fear of
                 increasing the number of spill registers needed but now
                 discover can be safely merged.  */
              if (SMALL_REGISTER_CLASSES)
                merge_assigned_reloads (insn);

              /* Generate the insns to reload operands into or out of
                 their reload regs.  */
              emit_reload_insns (insn);

              /* Substitute the chosen reload regs from reload_reg_rtx
                 into the insn's body (or perhaps into the bodies of other
                 load and store insns that we just made for reloading
                 and that we moved the structure into).  */
              subst_reloads ();

              /* If this was an ASM, make sure that all the reload insns
                 we have generated are valid.  If not, give an error
                 and delete them.  */

              if (asm_noperands (PATTERN (insn)) >= 0)
                for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
                  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
                      && (recog_memoized (p) < 0
                          || (insn_extract (p),
                              ! constrain_operands (INSN_CODE (p), 1))))
                    {
                      error_for_asm (insn,
                                     "`asm' operand requires impossible reload");
                      PUT_CODE (p, NOTE);
                      NOTE_SOURCE_FILE (p) = 0;
                      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
                    }
            }

          /* Any previously reloaded spilled pseudo reg, stored in this insn,
             is no longer validly lying around to save a future reload.
             Note that this does not detect pseudos that were reloaded
             for this insn in order to be stored in
             (obeying register constraints).  That is correct; such reload
             registers ARE still valid.  */
          note_stores (oldpat, forget_old_reloads_1);

          /* There may have been CLOBBER insns placed after INSN.  So scan
             between INSN and NEXT and use them to forget old reloads.  */
          for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
            if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
              note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
          /* Likewise for regs altered by auto-increment in this insn.
             But note that the reg-notes are not changed by reloading:
             they still contain the pseudo-regs, not the spill regs.  */
          for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
            if (REG_NOTE_KIND (x) == REG_INC)
              {
                /* See if this pseudo reg was reloaded in this insn.
                   If so, its last-reload info is still valid
                   because it is based on this insn's reload.  */
                for (i = 0; i < n_reloads; i++)
                  if (reload_out[i] == XEXP (x, 0))
                    break;

                if (i == n_reloads)
                  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
              }
#endif
        }

      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
        CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
         if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
        AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);

      /* In case registers overlap, allow certain insns to invalidate
         particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
            && INSN_CLOBBERS_REGNO_P (insn, i))
          CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
#endif

      insn = next;
    }
}
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.  */

static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored ATTRIBUTE_UNUSED;
{
  register int regno;
  int nr;
  int offset = 0;

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      register int i;

      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
         This can happen if a block-local pseudo is allocated to that reg
         and it wasn't spilled because this block's total need is 0.
         Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
        /* But don't do this if the reg actually serves as an output
           reload reg in the current instruction.  */
        if (n_reloads == 0
            || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
          CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
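/* Worked example for the SUBREG handling above: under the SUBREG_WORD
   representation, (subreg:SI (reg:DI 10) 1) on a target whose word is
   32 bits refers to hard register 10 + 1 = 11, so OFFSET accumulates the
   word number before it is added to REGNO.  (This assumes DImode occupies
   two consecutive hard registers on that target.)  */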
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];

/* Comparison function for qsort to decide which of two reloads
   should be handled first.  *P1 and *P2 are the reload numbers.  */

static int
reload_reg_class_lower (r1p, r2p)
     const GENERIC_PTR r1p;
     const GENERIC_PTR r2p;
{
  register int r1 = *(short *) r1p, r2 = *(short *) r2p;
  register int t;

  /* Consider required reloads before optional ones.  */
  t = reload_optional[r1] - reload_optional[r2];
  if (t != 0)
    return t;

  /* Count all solitary classes before non-solitary ones.  */
  t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
       - (reg_class_size[(int) reload_reg_class[r1]] == 1));
  if (t != 0)
    return t;

  /* Aside from solitaires, consider all multi-reg groups first.  */
  t = reload_nregs[r2] - reload_nregs[r1];
  if (t != 0)
    return t;

  /* Consider reloads in order of increasing reg-class number.  */
  t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
  if (t != 0)
    return t;

  /* If reloads are equally urgent, sort by reload number,
     so that the results of qsort leave nothing to chance.  */
  return r1 - r2;
}
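/* A hedged sketch of how a comparator like the one above is driven: the
   real caller fills reload_order elsewhere, so the local array here is
   purely illustrative.  */
#if 0
static void
sort_reloads_example (void)
{
  short order[MAX_RELOADS];
  int i;

  for (i = 0; i < n_reloads; i++)
    order[i] = i;

  /* Most constrained reloads come first; ties are broken by reload
     number so the result is deterministic.  */
  qsort (order, n_reloads, sizeof (short), reload_reg_class_lower);
}
#endif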
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
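/* These sets are manipulated with the HARD_REG_SET macros from
   hard-reg-set.h (SET_HARD_REG_BIT, CLEAR_HARD_REG_BIT, TEST_HARD_REG_BIT,
   CLEAR_HARD_REG_SET).  As an illustration only, a hypothetical query that
   asks whether REGNO is busy with any input reload of the current insn
   could be written like this:  */
#if 0
static int
used_by_any_input_reload_p (int regno)
{
  int i;

  for (i = 0; i < reload_n_operands; i++)
    if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
      return 1;
  return 0;
}
#endif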
/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
   TYPE.  MODE is used to indicate how many consecutive regs are
   actually needed.  */

static void
mark_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
        {
        case RELOAD_OTHER:
          SET_HARD_REG_BIT (reload_reg_used, i);
          break;

        case RELOAD_FOR_INPUT_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
          break;

        case RELOAD_FOR_INPADDR_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTADDR_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
          break;

        case RELOAD_FOR_OPERAND_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
          break;

        case RELOAD_FOR_OPADDR_ADDR:
          SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
          break;

        case RELOAD_FOR_OTHER_ADDRESS:
          SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
          break;

        case RELOAD_FOR_INPUT:
          SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT:
          SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
          break;

        case RELOAD_FOR_INSN:
          SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
          break;
        }

      SET_HARD_REG_BIT (reload_reg_used_at_all, i);
    }
}
/* Similarly, but show REGNO is no longer in use for a reload.  */

static void
clear_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int i;

  for (i = regno; i < nregs + regno; i++)
    {
      switch (type)
        {
        case RELOAD_OTHER:
          CLEAR_HARD_REG_BIT (reload_reg_used, i);
          break;

        case RELOAD_FOR_INPUT_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
          break;

        case RELOAD_FOR_INPADDR_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
          break;

        case RELOAD_FOR_OUTADDR_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
          break;

        case RELOAD_FOR_OPERAND_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
          break;

        case RELOAD_FOR_OPADDR_ADDR:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
          break;

        case RELOAD_FOR_OTHER_ADDRESS:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
          break;

        case RELOAD_FOR_INPUT:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
          break;

        case RELOAD_FOR_OUTPUT:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
          break;

        case RELOAD_FOR_INSN:
          CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
          break;
        }
    }
}
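/* A hedged usage sketch: the two functions above are the bookkeeping pair
   for a register choice, so a caller that tentatively claims a register and
   then backs out would bracket its work with them.  The helper below is
   hypothetical; the real callers (choose_reload_regs and friends) have more
   elaborate control flow.  */
#if 0
static void
try_and_release_example (int regno, int opnum, enum reload_type type,
                         enum machine_mode mode)
{
  mark_reload_reg_in_use (regno, opnum, type, mode);
  /* ... inspect the conflict sets here ... */
  clear_reload_reg_in_use (regno, opnum, type, mode);
}
#endif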
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
        return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
         operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
         for this operand or used as an input in an earlier
         one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
         operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
         for this operand or used as an output in this or a
         later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
        return 0;

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
         outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  abort ();
}
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   However, if EQUIV is set, we are checking the availability of a register
   holding an equivalence to the value to be loaded into the reload register,
   not the availability of the reload register itself.

   This is still less stringent than what reload_reg_free_p checks; for
   example, compare the checks for RELOAD_OTHER.  */

static int
reload_reg_free_before_p (regno, opnum, type, equiv)
     int regno;
     int opnum;
     enum reload_type type;
     int equiv;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno))
        return 0;
      return 1;

    case RELOAD_OTHER:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
        return 0;
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
         check the reg is not in use for any prior part.  It is tempting
         to try to do this by falling through from objects that occur
         later in the insn to ones that occur earlier, but that will not
         correctly take into account the fact that here we MUST ignore
         things that would prevent the register from being allocated in
         the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      if (equiv
          && TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;
      /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
        return 0;
      /* ... fall through ... */
    case RELOAD_FOR_OUTADDR_ADDRESS:
      if (equiv
          && (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)
              || TEST_HARD_REG_BIT (reload_reg_used, regno)))
        return 0;
      /* Earlier reloads are for earlier outputs or their addresses,
         any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
         RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
         RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_INSN:
      /* There is no reason to call this function for output reloads, thus
         anything we'd put here wouldn't be tested.  So just abort.  */
      abort ();

    case RELOAD_FOR_OPERAND_ADDRESS:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
        return 0;

      /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPADDR_ADDR:
      if (equiv)
        {
          if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
              || TEST_HARD_REG_BIT (reload_reg_used, regno))
            return 0;

          for (i = 0; i < reload_n_operands; i++)
            if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
              return 0;
        }

      /* These can't conflict with inputs, or each other, so all we have to
         test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
        return 0;

      /* The only things earlier are the address for this and
         earlier inputs, other inputs (which we know we don't conflict
         with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;
      /* ... fall through ... */
    case RELOAD_FOR_INPADDR_ADDRESS:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
        return 0;

      /* Similarly, all we have to check is for use in earlier inputs'
         addresses.  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  abort ();
}
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
         its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
         its value reaches if no subsequent part uses the same register.
         Just like the above function, don't try to do this with lots
         of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
         with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
         and the address of only subsequent inputs and we do not need
         to check for RELOAD_OTHER objects since they are known not to
         conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
         both input and input address and we do not check for
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
         would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
         we need only check for output addresses.  */

      opnum = -1;

      /* ... fall through ... */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
         only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      return 1;
    }

  abort ();
}
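/* Concrete example of the rules above: a RELOAD_FOR_INPUT reload's value
   does not reach the end of the insn if the same register is also used by
   any RELOAD_FOR_OUTPUT reload, because every output is written after the
   insn executes and would clobber it.  */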
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.

   This function uses the same algorithm as reload_reg_free_p above.  */

static int
reloads_conflict (r1, r2)
     int r1, r2;
{
  enum reload_type r1_type = reload_when_needed[r1];
  enum reload_type r2_type = reload_when_needed[r2];
  int r1_opnum = reload_opnum[r1];
  int r2_opnum = reload_opnum[r2];

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      return (r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS
              || r2_type == RELOAD_FOR_OPADDR_ADDR
              || r2_type == RELOAD_FOR_INPUT
              || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
                   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
                  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
              || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
              || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
              || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
              || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
                   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
                  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
              || r2_type == RELOAD_FOR_INSN
              || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      abort ();
    }
}
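/* Worked example of the table above: if R1 is a RELOAD_FOR_INPUT_ADDRESS
   reload for operand 2 and R2 is a RELOAD_FOR_INPUT reload for operand 1,
   they conflict (r2_opnum < r1_opnum), since operand 1's input value is
   already live while operand 2's address is still being formed.
   Input-address reloads for two different operands, by contrast, do not
   conflict.  */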
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
int reload_spill_index[MAX_RELOADS];
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   may be used to load VALUE into it.

   Other read-only reloads with the same value do not conflict
   unless OUT is non-zero and these other reloads have to live while
   output reloads live.

   RELOADNUM is the number of the reload we want to load this value for;
   a reload does not conflict with itself.

   The caller has to make sure that there is no conflict with the return
   register.  */

static int
reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
     int regno;
     int opnum;
     enum reload_type type;
     rtx value, out;
     int reloadnum;
{
  int time1;
  int i;

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      time1 = 0;
      break;

    /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
       RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2,
       respectively, to the time values for these, we get distinct time
       values.  To get distinct time values for each operand, we have to
       multiply opnum by at least three.  We round that up to four because
       multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 1;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till just before the
         instruction is executed.  */
      time1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3;
      break;
    /* opnum * 4 + 3 < opnum * 4 + 4
       <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5;
    }

  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = reload_reg_rtx[i];
      if (reg && GET_CODE (reg) == REG
          && ((unsigned) regno - true_regnum (reg)
              <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned) 1)
          && i != reloadnum)
        {
          if (out
              && reload_when_needed[i] != RELOAD_FOR_INPUT
              && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
              && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
            return 0;

          if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
              || reload_out[i])
            {
              int time2;

              switch (reload_when_needed[i])
                {
                case RELOAD_FOR_OTHER_ADDRESS:
                  time2 = 0;
                  break;
                case RELOAD_FOR_INPADDR_ADDRESS:
                  time2 = reload_opnum[i] * 4 + 1;
                  break;
                case RELOAD_FOR_INPUT_ADDRESS:
                  time2 = reload_opnum[i] * 4 + 2;
                  break;
                case RELOAD_FOR_INPUT:
                  time2 = reload_opnum[i] * 4 + 3;
                  break;
                case RELOAD_FOR_OUTPUT:
                  /* All RELOAD_FOR_OUTPUT reloads become live just after the
                     instruction is executed.  */
                  time2 = MAX_RECOG_OPERANDS * 4;
                  break;
                /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with
                   the RELOAD_FOR_OUTPUT reloads, so assign it the same time
                   value.  */
                case RELOAD_FOR_OUTPUT_ADDRESS:
                  time2 = MAX_RECOG_OPERANDS * 4 + reload_opnum[i];
                  break;
                default:
                  /* Be conservative, unless there is no conflict with the
                     value being loaded.  */
                  if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
                    time2 = MAX_RECOG_OPERANDS * 4;
                  else
                    time2 = 0;
                }

              if (time1 >= time2)
                return 0;
            }
        }
    }

  return 1;
}
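/* Worked example of the time buckets above: with the multiplier of four,
   operand 2's RELOAD_FOR_INPADDR_ADDRESS, RELOAD_FOR_INPUT_ADDRESS and
   RELOAD_FOR_INPUT buckets are 2*4+1 = 9, 2*4+2 = 10 and 2*4+3 = 11, all
   distinct from operand 3's 13, 14 and 15; and since opnum * 4 + 3 is at
   most (MAX_RECOG_OPERANDS - 1) * 4 + 3, every input-side value is smaller
   than the MAX_RECOG_OPERANDS * 4 used for the output side.  */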
5235 /* Find a spill register to use as a reload register for reload R.
5236 LAST_RELOAD is non-zero if this is the last reload for the insn being
5239 Set reload_reg_rtx[R] to the register allocated.
5241 If NOERROR is nonzero, we return 1 if successful,
5242 or 0 if we couldn't find a spill reg and we didn't change anything. */
5245 allocate_reload_reg (r
, insn
, last_reload
, noerror
)
5257 /* If we put this reload ahead, thinking it is a group,
5258 then insist on finding a group. Otherwise we can grab a
5259 reg that some other reload needs.
5260 (That can happen when we have a 68000 DATA_OR_FP_REG
5261 which is a group of data regs or one fp reg.)
5262 We need not be so restrictive if there are no more reloads
5265 ??? Really it would be nicer to have smarter handling
5266 for that kind of reg class, where a problem like this is normal.
5267 Perhaps those classes should be avoided for reloading
5268 by use of more alternatives. */
5270 int force_group
= reload_nregs
[r
] > 1 && ! last_reload
;
5272 /* If we want a single register and haven't yet found one,
5273 take any reg in the right class and not in use.
5274 If we want a consecutive group, here is where we look for it.
5276 We use two passes so we can first look for reload regs to
5277 reuse, which are already in use for other reloads in this insn,
5278 and only then use additional registers.
5279 I think that maximizing reuse is needed to make sure we don't
5280 run out of reload regs. Suppose we have three reloads, and
5281 reloads A and B can share regs. These need two regs.
5282 Suppose A and B are given different regs.
5283 That leaves none for C. */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  if ((reload_reg_free_p (spill_regs[i], reload_opnum[r],
				  reload_when_needed[r])
	       /* We check reload_reg_used to make sure we
		  don't clobber the return register.  */
	       && ! TEST_HARD_REG_BIT (reload_reg_used, spill_regs[i])
	       && reload_reg_free_for_value_p (spill_regs[i],
					       reload_when_needed[r],
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
		  && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,

	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      /* But reject a single reg if we demand a group.  */
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))

		  regno = spill_regs[i] + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, reload_opnum[r],
					      reload_when_needed[r])
			&& ! TEST_HARD_REG_BIT (counted_for_nongroups,
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */
  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx_REG (reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */

	    /* Mark as in use for this insn the reload regs we use
	       for it.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = spill_regs[i];
	  }
    }

  /* The reg is not OK.  */
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;
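/* For reference, a summary of how this function is invoked by
   choose_reload_regs below (no new behavior, just restating the calls
   that appear there):

       allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);

   so LAST_RELOAD is true only for the final reload of the insn, and
   NOERROR is the inheritance flag: on the inheritance attempt a failure
   is reported quietly, and choose_reload_regs then retries the whole
   assignment with inheritance turned off.  */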
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     rtx avoid_return_reg;
{
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;

  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (SMALL_REGISTER_CLASSES && avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  for (j = 0; j < n_reloads; j++)
	    if (!reload_optional[j] && reload_reg_rtx[j] == 0
		&& (reload_in[j] != 0 || reload_out[j] != 0
		    || reload_secondary_p[j])
		&&
		TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))

      avoid_return_reg = 0;
    }

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = SMALL_REGISTER_CLASSES ? (avoid_return_reg != 0) : 0;
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
  }
#endif

  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (SMALL_REGISTER_CLASSES && avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  SET_HARD_REG_BIT (reload_reg_used, r);
    }

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */
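  /* An added illustrative example (hypothetical machine, not from the
     original comments): if some reload wants a DImode value in a class
     whose registers are word-sized, CLASS_MAX_NREGS yields 2, so
     reload_nregs[j] == 2 in the loop below; max_group_size then becomes 2
     and group_class accumulates that class, which is what later makes
     allocate_reload_reg insist on two consecutive spill registers.  */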
  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      reload_mode[j]
	= (reload_inmode[j] == VOIDmode
	   || (GET_MODE_SIZE (reload_outmode[j])
	       > GET_MODE_SIZE (reload_inmode[j])))
	  ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class
	    = reg_class_superunion[(int) reload_reg_class[j]][(int) group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
				reload_when_needed[j], reload_mode[j]);
    }

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
	 sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
	 (char *) save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
	 sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
		     reload_reg_used_in_op_addr_reload);
  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
		     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
		     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
			 reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
			 reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
			 reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
			 reload_reg_used_in_inpaddr_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
			 reload_reg_used_in_output_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
			 reload_reg_used_in_outaddr_addr[i]);
    }

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */
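      /* Outline of the control flow that follows (a reader's summary added
	 here, not new semantics): the first loop below walks reload_order
	 trying to inherit a register from a previous insn or to reuse an
	 equivalent register that already holds the value; the second loop
	 then calls allocate_reload_reg for everything still unassigned.
	 If that fails while inheritance is enabled, the saved reload state
	 is restored and the enclosing for-loop repeats the whole process
	 with inheritance turned off.  */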
      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0
	      && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a
	     register be allocated here.  In `emit_reload_insns' we suppress
	     one of the loads in the case described above.  */

	    {
	      register int regno = -1;
	      enum machine_mode mode;

	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		{
		  regno = REGNO (reload_in[r]);
		  mode = GET_MODE (reload_in[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		{
		  regno = REGNO (reload_in_reg[r]);
		  mode = GET_MODE (reload_in_reg[r]);
		}
	      else if (GET_CODE (reload_in[r]) == MEM)
		{
		  rtx prev = prev_nonnote_insn (insn), note;

		  if (prev && GET_CODE (prev) == INSN
		      && GET_CODE (PATTERN (prev)) == USE
		      && GET_CODE (XEXP (PATTERN (prev), 0)) == REG
		      && (REGNO (XEXP (PATTERN (prev), 0))
			  >= FIRST_PSEUDO_REGISTER)
		      && (note = find_reg_note (prev, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (note, 0)) == MEM)
		    {
		      rtx addr = XEXP (XEXP (note, 0), 0);
		      int size_diff
			= (GET_MODE_SIZE (GET_MODE (addr))
			   - GET_MODE_SIZE (GET_MODE (reload_in[r])));
			  && rtx_equal_p ((BYTES_BIG_ENDIAN
					   ? plus_constant (addr, size_diff)
					  XEXP (reload_in[r], 0)))
			{
			  regno = REGNO (XEXP (PATTERN (prev), 0));
			  mode = GET_MODE (reload_in[r]);
			}
		    }
		}
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  i = REGNO (reg_last_reload_reg[regno]);

		  if (reg_reloaded_contents[i] == regno
		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
		      && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
			  >= GET_MODE_SIZE (mode))
		      && HARD_REGNO_MODE_OK (i, reload_mode[r])
		      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
		      && ((reload_reg_free_p (i, reload_opnum[r],
					      reload_when_needed[r])
			   && reload_reg_free_before_p (i, reload_opnum[r],
							reload_when_needed[r],
			  || reload_reg_free_for_value_p (i, reload_opnum[r],
							  reload_when_needed[r],

		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (i, reload_mode[r]);

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[i + k] != regno
			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))

			  if (i1 != n_earlyclobbers
			      /* Don't use it if we'd clobber a pseudo reg.  */
			      || (spill_reg_order[i] < 0
				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (reload_mode[r])
				  > GET_MODE_SIZE (mode)))
			    reload_override_in[r] = reg_last_reload_reg[regno];

			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (i, reload_opnum[r],
						      reload_when_needed[r],
						      reload_mode[r]);
			      reload_reg_rtx[r] = reg_last_reload_reg[regno];
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
		}

	      /* Here's another way to see if the value is already lying around.  */
		  && reload_in[r] != 0
		  && ! reload_inherited[r]
		  && reload_out[r] == 0
		  && (CONSTANT_P (reload_in[r])
		      || GET_CODE (reload_in[r]) == PLUS
		      || GET_CODE (reload_in[r]) == REG
		      || GET_CODE (reload_in[r]) == MEM)
		  && (reload_nregs[r] == max_group_size
		      || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);

	      if (GET_CODE (equiv) == REG)
		regno = REGNO (equiv);
	      else if (GET_CODE (equiv) == SUBREG)
		{
		  /* This must be a SUBREG of a hard register.
		     Make a new REG since this might be used in an
		     address and not all machines support SUBREGs
		     there.  */
		  regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		  equiv = gen_rtx_REG (reload_mode[r], regno);
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
		  && ((spill_reg_order[regno] >= 0
		       && ! (reload_reg_free_before_p (regno, reload_opnum[r],
						       reload_when_needed[r], 1)
			     || reload_reg_free_for_value_p (regno,
							     reload_when_needed[r],
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    reload_override_in[r] = equiv;

	      /* If the equiv register we have found is explicitly clobbered
		 in the current insn, it depends on the reload type if we
		 can use it, use it for reload_override_in, or not at all.
		 In particular, we then can't use EQUIV for a
		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  switch (reload_when_needed[r])
		    {
		    case RELOAD_FOR_OTHER_ADDRESS:
		    case RELOAD_FOR_INPADDR_ADDRESS:
		    case RELOAD_FOR_INPUT_ADDRESS:
		    case RELOAD_FOR_OPADDR_ADDR:
		      break;
		    case RELOAD_FOR_INPUT:
		    case RELOAD_FOR_OPERAND_ADDRESS:
		      reload_override_in[r] = equiv;

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);

		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;

		  /* If reg_reloaded_valid is not set for this register,
		     there might be a stale spill_reg_store lying around.
		     We must clear it, since otherwise emit_reload_insns
		     might delete the store.  */
		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
		    spill_reg_store[regno] = NULL_RTX;
		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		    {
		      i = spill_reg_order[regno + k];
			  mark_reload_reg_in_use (regno, reload_opnum[r],
						  reload_when_needed[r],
						  reload_mode[r]);
			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
	    }
#endif

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
			     save_reload_reg_used_in_inpaddr_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
			     save_reload_reg_used_in_outaddr_addr[i]);
	}

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! (reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					  reload_when_needed[r], 0)
		|| reload_reg_free_for_value_p (true_regnum (reload_reg_rtx[r]),
						reload_when_needed[r],
	reload_inherited[r] = 0;
      /* If we can inherit a RELOAD_FOR_INPUT, then we do not need its related
	 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads.
	 ??? This could be extended to other reload types, but these are
	 more tricky to handle:
	 RELOAD_FOR_OTHER_ADDRESS reloads might have been merged, so we
	 can't eliminate them without a check that *all* references are
	 now unused due to inheritance.
	 While RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_OUTADDR_ADDRESS are
	 not merged, we can't be sure that we have eliminated the use of
	 that particular reload if we have seen just one
	 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS being inherited,
	 since there might be multiple of the latter two reloads for a single
	 operand.
	 RELOAD_FOR_OPADDR_ADDR reloads for different operands are not
	 merged, but might share the same register by courtesy of
	 reload_reg_free_for_value_p.  reload_reg_used_in_op_addr_reload
	 does not differentiate by opnum, thus calling clear_reload_reg_in_use
	 for one of these reloads would mark the register as free even though
	 another RELOAD_FOR_OPADDR_ADDR reload might still use it.  */
      else if (reload_inherited[r] && reload_when_needed[r] == RELOAD_FOR_INPUT)
	{
	  for (i = 0; i < n_reloads; i++)
	    if ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
		 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
		&& reload_opnum[i] == reload_opnum[r]
		&& reload_in[i] && reload_reg_rtx[i])
	      {
		int regno = true_regnum (reload_reg_rtx[i]);

		if (spill_reg_order[regno] >= 0)
		  clear_reload_reg_in_use (regno, reload_opnum[i],
					   reload_when_needed[i],
		reload_reg_rtx[i] = 0;
		reload_spill_index[i] = -1;
		remove_replacements (i);
	      }
	}

      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_opnum[r],
					     reload_when_needed[r], 1))
	    reload_override_in[r] = 0;
	}
  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
      }

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	    reg_has_output_reload[nregno + nr] = 1;

	      nr = HARD_REGNO_NREGS (i, reload_mode[r]);

		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);

	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	}
    }

/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
   reloads of the same item for fear that we might not have enough reload
   registers.  However, normally they will get the same reload register
   and hence actually need not be loaded twice.

   Here we check for the most common case of this phenomenon: when we have
   a number of reloads for the same object, each of which were allocated
   the same reload_reg_rtx, that reload_reg_rtx is not used for any other
   reload, and is not modified in the insn itself.  If we find such,
   merge all the reloads and set the resulting reload to RELOAD_OTHER.
   This will not increase the number of spill registers needed and will
   prevent redundant code.  */
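/* An added example of the phenomenon described above (illustrative only):
   if operands 1 and 2 of an insn both use pseudo P as an address and P did
   not get a hard register, find_reloads may have created a separate
   RELOAD_FOR_INPUT_ADDRESS reload of P for each operand.  When both of
   those reloads end up with the same reload_reg_rtx, P need only be loaded
   once, so the code below turns the surviving reload into RELOAD_OTHER and
   discards the duplicates.  */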
static void
merge_assigned_reloads (insn)
     rtx insn;
{
  /* Scan all the reloads looking for ones that only load values and
     are not already RELOAD_OTHER and ones whose reload_reg_rtx are
     assigned and not modified by INSN.  */

  for (i = 0; i < n_reloads; i++)
    {
      int conflicting_input = 0;
      int max_input_address_opnum = -1;
      int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;

      if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
	  || reload_out[i] != 0 || reload_reg_rtx[i] == 0
	  || reg_set_p (reload_reg_rtx[i], insn))
	continue;

      /* Look at all other reloads.  Ensure that the only use of this
	 reload_reg_rtx is in a reload that just loads the same value
	 as we do.  Note that any secondary reloads must be of the identical
	 class since the values, modes, and result registers are the
	 same, so we need not do anything with any secondary reloads.  */

      for (j = 0; j < n_reloads; j++)
	{
	  if (i == j || reload_reg_rtx[j] == 0
	      || ! reg_overlap_mentioned_p (reload_reg_rtx[j],

	  if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
	      && reload_opnum[j] > max_input_address_opnum)
	    max_input_address_opnum = reload_opnum[j];

	  /* If the reload regs aren't exactly the same (e.g., different modes)
	     or if the values are different, we can't merge this reload.
	     But if it is an input reload, we might still merge
	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */

	  if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
	      || reload_out[j] != 0 || reload_in[j] == 0
	      || ! rtx_equal_p (reload_in[i], reload_in[j]))
	    {
	      if (reload_when_needed[j] != RELOAD_FOR_INPUT
		  || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
		       || reload_opnum[i] > reload_opnum[j])
		      && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
		{
		  conflicting_input = 1;
		  if (min_conflicting_input_opnum > reload_opnum[j])
		    min_conflicting_input_opnum = reload_opnum[j];
		}
	    }
	}

      /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
	 we, in fact, found any matching reloads.  */

	  && max_input_address_opnum <= min_conflicting_input_opnum)
	{
	  for (j = 0; j < n_reloads; j++)
	    if (i != j && reload_reg_rtx[j] != 0
		&& rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
		&& (! conflicting_input
		    || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
		    || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
	      {
		reload_when_needed[i] = RELOAD_OTHER;
		reload_spill_index[j] = -1;
		transfer_replacements (i, j);
	      }

	  /* If this is now RELOAD_OTHER, look for any reloads that load
	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
	     this test is equivalent to looking for reloads for this operand
	     number.  */

	  if (reload_when_needed[i] == RELOAD_OTHER)
	    for (j = 0; j < n_reloads; j++)
	      if (reload_in[j] != 0
		  && reload_when_needed[i] != RELOAD_OTHER
		  && reg_overlap_mentioned_for_reload_p (reload_in[j],
		reload_when_needed[j]
		  = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
		      || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
		     ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
/* Output insns to reload values in and out of the chosen reload regs.  */

static void
emit_reload_insns (insn)
     rtx insn;
{
  rtx input_reload_insns[MAX_RECOG_OPERANDS];
  rtx other_input_address_reload_insns = 0;
  rtx other_input_reload_insns = 0;
  rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_reload_insns[MAX_RECOG_OPERANDS];
  rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
  rtx operand_reload_insns = 0;
  rtx other_operand_reload_insns = 0;
  rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
  rtx following_insn = NEXT_INSN (insn);
  rtx before_insn = insn;

  /* Values to be put in spill_reg_store are put here first.  */
  rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
  HARD_REG_SET reg_reloaded_died;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */
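  /* A note added for the reader, summarizing the buffers declared above
     and the `where' switch further down (no new behavior): the reload
     insns are not emitted straight into the insn stream.  Each category --
     other-address, per-operand input-address, per-operand input,
     operand-address, output and output-address reloads -- is accumulated
     in its own sequence via push_to_sequence, and those sequences are
     spliced in around INSN afterwards, so that address reloads end up
     ahead of the reloads that need them.  */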
  for (j = 0; j < n_reloads; j++)
    {
      rtx oldequiv_reg = 0;
      rtx this_reload_insn = 0;
      int expect_occurrences = 1;

      if (reload_spill_index[j] >= 0)
	new_spill_reg_store[reload_spill_index[j]] = 0;

      old = reload_in[j];
      if (old != 0 && ! reload_inherited[j]
	  && ! rtx_equal_p (reload_reg_rtx[j], old)
	  && reload_reg_rtx[j] != 0)
	{
	  register rtx reloadreg = reload_reg_rtx[j];
	  rtx oldequiv = 0;
	  enum machine_mode mode;
	  int special = 0;

	  /* Determine the mode to reload in.
	     This is very tricky because we have three to choose from.
	     There is the mode the insn operand wants (reload_inmode[J]).
	     There is the mode of the reload register RELOADREG.
	     There is the intrinsic mode of the operand, which we could find
	     by stripping some SUBREGs.
	     It turns out that RELOADREG's mode is irrelevant:
	     we can change that arbitrarily.

	     Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	     then the reload reg may not support QImode moves, so use SImode.
	     If foo is in memory due to spilling a pseudo reg, this is safe,
	     because the QImode value is in the least significant part of a
	     slot big enough for a SImode.  If foo is some other sort of
	     memory reference, then it is impossible to reload this case,
	     so previous passes had better make sure this never happens.

	     Then consider a one-word union which has SImode and one of its
	     members is a float, being fetched as (SUBREG:SF union:SI).
	     We must fetch that as SFmode because we could be loading into
	     a float-only register.  In this case OLD's mode is correct.

	     Consider an immediate integer: it has VOIDmode.  Here we need
	     to get a mode from something else.

	     In some cases, there is a fourth mode, the operand's
	     containing mode.  If the insn specifies a containing mode for
	     this operand, it overrides all others.

	     I am not sure whether the algorithm here is always right,
	     but it does the right things in those cases.  */
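	  /* Added worked examples of the rules above (they restate the
	     comment, nothing new): reloading (subreg:SI (reg:QI N)) where
	     pseudo N lives on the stack uses SImode, since the QImode value
	     sits in the low part of an SImode-sized slot; reloading
	     (subreg:SF (reg:SI U)) for a one-word union uses SFmode so a
	     float-only register can receive it; and a CONST_INT, whose mode
	     is VOIDmode, falls back to reload_inmode[j], exactly as the
	     code just below does.  */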
	  mode = GET_MODE (old);
	  if (mode == VOIDmode)
	    mode = reload_inmode[j];

#ifdef SECONDARY_INPUT_RELOAD_CLASS
	  /* If we need a secondary register for this operation, see if
	     the value is already in a register in that class.  Don't
	     do this if the secondary register will be used as a scratch
	     register.  */

	  if (reload_secondary_in_reload[j] >= 0
	      && reload_secondary_in_icode[j] == CODE_FOR_nothing
	    oldequiv
	      = find_equiv_reg (old, insn,
				reload_reg_class[reload_secondary_in_reload[j]],
				-1, NULL_PTR, 0, mode);
#endif

	  /* If reloading from memory, see if there is a register
	     that already holds the same value.  If so, reload from there.
	     We can pass 0 as the reload_reg_p argument because
	     any other reload has either already been emitted,
	     in which case find_equiv_reg will see the reload-insn,
	     or has yet to be emitted, in which case it doesn't matter
	     because we will use this equiv reg right away.  */

	  if (oldequiv == 0 && optimize
	      && (GET_CODE (old) == MEM
		  || (GET_CODE (old) == REG
		      && REGNO (old) >= FIRST_PSEUDO_REGISTER
		      && reg_renumber[REGNO (old)] < 0)))
	    oldequiv = find_equiv_reg (old, insn, ALL_REGS,
				       -1, NULL_PTR, 0, mode);

	    {
	      int regno = true_regnum (oldequiv);

	      /* If OLDEQUIV is a spill register, don't use it for this
		 if any other reload needs it at an earlier stage of this insn
		 or at this stage.  */
	      if (spill_reg_order[regno] >= 0
		  && (! reload_reg_free_p (regno, reload_opnum[j],
					   reload_when_needed[j])
		      || ! reload_reg_free_before_p (regno, reload_opnum[j],
						     reload_when_needed[j], 1)))
		oldequiv = 0;

	      /* If OLDEQUIV is not a spill register,
		 don't use it if any other reload wants it.  */
	      if (spill_reg_order[regno] < 0)
		for (k = 0; k < n_reloads; k++)
		  if (reload_reg_rtx[k] != 0 && k != j
		      && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],

	      /* If it is no cheaper to copy from OLDEQUIV into the
		 reload register than it would be to move from memory,
		 don't use it.  Likewise, if we need a secondary register
		  && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
		       && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
					       reload_reg_class[j])
			   >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
		      || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
#endif
#ifdef SECONDARY_MEMORY_NEEDED
		      || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
						  reload_reg_class[j],
	    }

	  else if (GET_CODE (oldequiv) == REG)
	    oldequiv_reg = oldequiv;
	  else if (GET_CODE (oldequiv) == SUBREG)
	    oldequiv_reg = SUBREG_REG (oldequiv);

	  /* If we are reloading from a register that was recently stored in
	     with an output-reload, see if we can prove there was
	     actually no need to store the old value in it.  */

	  if (optimize && GET_CODE (oldequiv) == REG
	      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
	      && spill_reg_store[REGNO (oldequiv)]
	      && GET_CODE (old) == REG && dead_or_set_p (insn, old)
	      /* This is unsafe if operand occurs more than once in current
		 insn.  Perhaps some occurrences weren't reloaded.  */
	      && count_occurrences (PATTERN (insn), old) == 1)
	    delete_output_reload (insn, j, spill_reg_store[REGNO (oldequiv)]);

	  /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
	     then load RELOADREG from OLDEQUIV.  Note that we cannot use
	     gen_lowpart_common since it can do the wrong thing when
	     RELOADREG has a multi-word mode.  Note that RELOADREG
	     must always be a REG here.  */

	  if (GET_MODE (reloadreg) != mode)
	    reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
	  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
	    oldequiv = SUBREG_REG (oldequiv);
	  if (GET_MODE (oldequiv) != VOIDmode
	      && mode != GET_MODE (oldequiv))
	    oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
6543 switch (reload_when_needed
[j
])
6546 where
= &other_input_reload_insns
;
6548 case RELOAD_FOR_INPUT
:
6549 where
= &input_reload_insns
[reload_opnum
[j
]];
6551 case RELOAD_FOR_INPUT_ADDRESS
:
6552 where
= &input_address_reload_insns
[reload_opnum
[j
]];
6554 case RELOAD_FOR_INPADDR_ADDRESS
:
6555 where
= &inpaddr_address_reload_insns
[reload_opnum
[j
]];
6557 case RELOAD_FOR_OUTPUT_ADDRESS
:
6558 where
= &output_address_reload_insns
[reload_opnum
[j
]];
6560 case RELOAD_FOR_OUTADDR_ADDRESS
:
6561 where
= &outaddr_address_reload_insns
[reload_opnum
[j
]];
6563 case RELOAD_FOR_OPERAND_ADDRESS
:
6564 where
= &operand_reload_insns
;
6566 case RELOAD_FOR_OPADDR_ADDR
:
6567 where
= &other_operand_reload_insns
;
6569 case RELOAD_FOR_OTHER_ADDRESS
:
6570 where
= &other_input_address_reload_insns
;
6576 push_to_sequence (*where
);
6579 /* Auto-increment addresses must be reloaded in a special way. */
6580 if (GET_CODE (oldequiv
) == POST_INC
6581 || GET_CODE (oldequiv
) == POST_DEC
6582 || GET_CODE (oldequiv
) == PRE_INC
6583 || GET_CODE (oldequiv
) == PRE_DEC
)
6585 /* We are not going to bother supporting the case where a
6586 incremented register can't be copied directly from
6587 OLDEQUIV since this seems highly unlikely. */
6588 if (reload_secondary_in_reload
[j
] >= 0)
6590 /* Prevent normal processing of this reload. */
6592 /* Output a special code sequence for this case. */
6593 inc_for_reload (reloadreg
, oldequiv
, reload_inc
[j
]);
6596 /* If we are reloading a pseudo-register that was set by the previous
6597 insn, see if we can get rid of that pseudo-register entirely
6598 by redirecting the previous insn into our reload register. */
6600 else if (optimize
&& GET_CODE (old
) == REG
6601 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6602 && dead_or_set_p (insn
, old
)
6603 /* This is unsafe if some other reload
6604 uses the same reg first. */
6605 && reload_reg_free_before_p (REGNO (reloadreg
),
6607 reload_when_needed
[j
], 0))
6609 rtx temp
= PREV_INSN (insn
);
6610 while (temp
&& GET_CODE (temp
) == NOTE
)
6611 temp
= PREV_INSN (temp
);
6613 && GET_CODE (temp
) == INSN
6614 && GET_CODE (PATTERN (temp
)) == SET
6615 && SET_DEST (PATTERN (temp
)) == old
6616 /* Make sure we can access insn_operand_constraint. */
6617 && asm_noperands (PATTERN (temp
)) < 0
6618 /* This is unsafe if prev insn rejects our reload reg. */
6619 && constraint_accepts_reg_p (insn_operand_constraint
[recog_memoized (temp
)][0],
6621 /* This is unsafe if operand occurs more than once in current
6622 insn. Perhaps some occurrences aren't reloaded. */
6623 && count_occurrences (PATTERN (insn
), old
) == 1
6624 /* Don't risk splitting a matching pair of operands. */
6625 && ! reg_mentioned_p (old
, SET_SRC (PATTERN (temp
))))
6627 /* Store into the reload register instead of the pseudo. */
6628 SET_DEST (PATTERN (temp
)) = reloadreg
;
6629 /* If these are the only uses of the pseudo reg,
6630 pretend for GDB it lives in the reload reg we used. */
6631 if (REG_N_DEATHS (REGNO (old
)) == 1
6632 && REG_N_SETS (REGNO (old
)) == 1)
6634 reg_renumber
[REGNO (old
)] = REGNO (reload_reg_rtx
[j
]);
6635 alter_reg (REGNO (old
), -1);
	  /* We can't do that, so output an insn to load RELOADREG.  */

#ifdef SECONDARY_INPUT_RELOAD_CLASS
	      rtx second_reload_reg = 0;
	      enum insn_code icode;

	      /* If we have a secondary reload, pick up the secondary register
		 and icode, if any.  If OLDEQUIV and OLD are different or
		 if this is an in-out reload, recompute whether or not we
		 still need a secondary register and what the icode should
		 be.  If we still need a secondary register and the class or
		 icode is different, go back to reloading from OLD if using
		 OLDEQUIV means that we got the wrong type of register.  We
		 cannot have different class or icode due to an in-out reload
		 because we don't make such reloads when both the input and
		 output need secondary reload registers.  */

	      if (reload_secondary_in_reload[j] >= 0)
		{
		  int secondary_reload = reload_secondary_in_reload[j];
		  rtx real_oldequiv = oldequiv;
		  rtx real_old = old;

		  /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
		     and similarly for OLD.
		     See comments in get_secondary_reload in reload.c.  */
		  if (GET_CODE (oldequiv) == REG
		      && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
		      && reg_equiv_mem[REGNO (oldequiv)] != 0)
		    real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];

		  if (GET_CODE (old) == REG
		      && REGNO (old) >= FIRST_PSEUDO_REGISTER
		      && reg_equiv_mem[REGNO (old)] != 0)
		    real_old = reg_equiv_mem[REGNO (old)];

		  second_reload_reg = reload_reg_rtx[secondary_reload];
		  icode = reload_secondary_in_icode[j];

		  if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
		      || (reload_in[j] != 0 && reload_out[j] != 0))
		    {
		      enum reg_class new_class
			= SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
							mode, real_oldequiv);

		      if (new_class == NO_REGS)
			second_reload_reg = 0;
		      else
			{
			  enum insn_code new_icode;
			  enum machine_mode new_mode;

			  if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
						   REGNO (second_reload_reg)))
			    oldequiv = old, real_oldequiv = real_old;

			  new_icode = reload_in_optab[(int) mode];
			  if (new_icode != CODE_FOR_nothing
			      && ((insn_operand_predicate[(int) new_icode][0]
				   && ! ((*insn_operand_predicate[(int) new_icode][0])
				  || (insn_operand_predicate[(int) new_icode][1]
				      && ! ((*insn_operand_predicate[(int) new_icode][1])
					    (real_oldequiv, mode)))))
			    new_icode = CODE_FOR_nothing;

			  if (new_icode == CODE_FOR_nothing)
			    new_mode = mode;
			  else
			    new_mode = insn_operand_mode[(int) new_icode][2];

			  if (GET_MODE (second_reload_reg) != new_mode)
			    {
			      if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
				oldequiv = old, real_oldequiv = real_old;
			      else
				second_reload_reg
				  = gen_rtx_REG (new_mode,
						 REGNO (second_reload_reg));
			    }
			}
		    }

		  /* If we still need a secondary reload register, check
		     to see if it is being used as a scratch or intermediate
		     register and generate code appropriately.  If we need
		     a scratch register, use REAL_OLDEQUIV since the form of
		     the insn may depend on the actual address if it is
		     a MEM.  */

		  if (second_reload_reg)
		    {
		      if (icode != CODE_FOR_nothing)
			{
			  emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
						      second_reload_reg));
			}
		      else
			{
			  /* See if we need a scratch register to load the
			     intermediate register (a tertiary reload).  */
			  enum insn_code tertiary_icode
			    = reload_secondary_in_icode[secondary_reload];

			  if (tertiary_icode != CODE_FOR_nothing)
			    {
			      rtx third_reload_reg
				= reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];

			      emit_insn ((GEN_FCN (tertiary_icode)
					  (second_reload_reg, real_oldequiv,
					   third_reload_reg)));
			    }

			  gen_reload (second_reload_reg, oldequiv,
				      reload_opnum[j],
				      reload_when_needed[j]);

			  oldequiv = second_reload_reg;
			}
		    }
		}
#endif

	      if (! special && ! rtx_equal_p (reloadreg, oldequiv))
		gen_reload (reloadreg, oldequiv, reload_opnum[j],
			    reload_when_needed[j]);

#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
	      /* We may have to make a REG_DEAD note for the secondary reload
		 register in the insns we just made.  Find the last insn that
		 mentioned the register.  */
	      if (! special && second_reload_reg
		  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
		{
		  for (prev = get_last_insn (); prev;
		       prev = PREV_INSN (prev))
		    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
			&& reg_overlap_mentioned_for_reload_p (second_reload_reg,
		      REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
		}
#endif

	  this_reload_insn = get_last_insn ();
	  /* End this sequence.  */
	  *where = get_insns ();
	  end_sequence ();
      /* When inheriting a wider reload, we have a MEM in reload_in[j],
	 e.g. inheriting a SImode output reload for
	 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
      if (optimize && reload_inherited[j] && reload_in[j]
	  && GET_CODE (reload_in[j]) == MEM
	  && reload_spill_index[j] >= 0
	  && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
	{
	  expect_occurrences
	    = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
	  reload_in[j]
	    = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
	}

      /* Add a note saying the input reload reg
	 dies in this insn, if anyone cares.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
	  && reload_reg_rtx[j] != old
	  && reload_reg_rtx[j] != 0
	  && reload_out[j] == 0
	  && ! reload_inherited[j]
	  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
	{
	  register rtx reloadreg = reload_reg_rtx[j];

	  /* We can't abort here because we need to support this for sched.c.
	     It's not terrible to miss a REG_DEAD note, but we should try
	     to figure out how to do this correctly.  */
	  /* The code below is incorrect for address-only reloads.  */
	  if (reload_when_needed[j] != RELOAD_OTHER
	      && reload_when_needed[j] != RELOAD_FOR_INPUT)

	  /* Add a death note to this insn, for an input reload.  */

	  if ((reload_when_needed[j] == RELOAD_OTHER
	       || reload_when_needed[j] == RELOAD_FOR_INPUT)
	      && ! dead_or_set_p (insn, reloadreg))
	    REG_NOTES (insn)
	      = gen_rtx_EXPR_LIST (REG_DEAD,
				   reloadreg, REG_NOTES (insn));
	}

      /* When we inherit a reload, the last marked death of the reload reg
	 may no longer really be a death.  */
      if (reload_reg_rtx[j] != 0
	  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
	  && reload_inherited[j])
	{
	  /* Handle inheriting an output reload.
	     Remove the death note from the output reload insn.  */
	  if (reload_spill_index[j] >= 0
	      && GET_CODE (reload_in[j]) == REG
	      && spill_reg_store[reload_spill_index[j]] != 0
	      && find_regno_note (spill_reg_store[reload_spill_index[j]],
				  REG_DEAD, REGNO (reload_reg_rtx[j])))
	    remove_death (REGNO (reload_reg_rtx[j]),
			  spill_reg_store[reload_spill_index[j]]);
	  /* Likewise for input reloads that were inherited.  */
	  else if (reload_spill_index[j] >= 0
		   && GET_CODE (reload_in[j]) == REG
		   && spill_reg_store[reload_spill_index[j]] == 0
		   && reload_inheritance_insn[j] != 0
		   && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
				       REGNO (reload_reg_rtx[j])))
	    remove_death (REGNO (reload_reg_rtx[j]),
			  reload_inheritance_insn[j]);

	      /* We got this register from find_equiv_reg.
		 Search back for its last death note and get rid of it.
		 But don't search back too far.
		 Don't go past a place where this reg is set,
		 since a death note before that remains valid.  */
	      for (prev = PREV_INSN (insn);
		   prev && GET_CODE (prev) != CODE_LABEL;
		   prev = PREV_INSN (prev))
		if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
		    && dead_or_set_p (prev, reload_reg_rtx[j]))
		  {
		    if (find_regno_note (prev, REG_DEAD,
					 REGNO (reload_reg_rtx[j])))
		      remove_death (REGNO (reload_reg_rtx[j]), prev);
		  }
      /* We might have used find_equiv_reg above to choose an alternate
	 place from which to reload.  If so, and it died, we need to remove
	 that death and move it to one of the insns we just made.  */

      if (oldequiv_reg != 0
	  && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
	{
	  for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
	       prev = PREV_INSN (prev))
	    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
		&& dead_or_set_p (prev, oldequiv_reg))
	      {
		if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
		  {
		    for (prev1 = this_reload_insn;
			 prev1; prev1 = PREV_INSN (prev1))
		      if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
			  && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
			  REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
		    remove_death (REGNO (oldequiv_reg), prev);
		  }
	      }
	}

      /* If we are reloading a register that was recently stored in with an
	 output-reload, see if we can prove there was
	 actually no need to store the old value in it.  */

      if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
	  && reload_in[j] != 0
	  && GET_CODE (reload_in[j]) == REG
	  /* There doesn't seem to be any reason to restrict this to pseudos
	     and doing so loses in the case where we are copying from a
	     register of the wrong class.  */
	  && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
	  && spill_reg_store[reload_spill_index[j]] != 0
	  /* This is unsafe if some other reload uses the same reg first.  */
	  && reload_reg_free_before_p (reload_spill_index[j],
				       reload_opnum[j], reload_when_needed[j],
	  && dead_or_set_p (insn, reload_in[j])
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences weren't reloaded.  */
	  && (count_occurrences (PATTERN (insn), reload_in[j])
	      == expect_occurrences))
	delete_output_reload (insn, j,
			      spill_reg_store[reload_spill_index[j]]);
      /* Input-reloading is done.  Now do output-reloading,
	 storing the value from the reload-register after the main insn
	 if reload_out[j] is nonzero.

	 ??? At some point we need to support handling output reloads of
	 JUMP_INSNs or insns that set cc0.  */
      old = reload_out[j];
	  && reload_reg_rtx[j] != old
	  && reload_reg_rtx[j] != 0)
	{
	  register rtx reloadreg = reload_reg_rtx[j];
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
	  register rtx second_reloadreg = 0;
#endif
	  enum machine_mode mode;

	  /* An output operand that dies right away does need a reload,
	     but need not be copied from it.  Show the new location in the
	     REG_UNUSED note.  */
	  if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
	      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
	    {
	      XEXP (note, 0) = reload_reg_rtx[j];
	    }
	  /* Likewise for a SUBREG of an operand that dies.  */
	  else if (GET_CODE (old) == SUBREG
		   && GET_CODE (SUBREG_REG (old)) == REG
		   && 0 != (note = find_reg_note (insn, REG_UNUSED,
	    {
	      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
	    }
	  else if (GET_CODE (old) == SCRATCH)
	    /* If we aren't optimizing, there won't be a REG_UNUSED note,
	       but we don't want to make an output reload.  */
	    continue;

	  /* Strip off of OLD any size-increasing SUBREGs such as
	     (SUBREG:SI foo:QI 0).  */

	  while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
		 && (GET_MODE_SIZE (GET_MODE (old))
		     > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
	    old = SUBREG_REG (old);

	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
	  if (GET_CODE (insn) == JUMP_INSN)
	    abort ();

	  if (reload_when_needed[j] == RELOAD_OTHER)
	    start_sequence ();
	  else
	    push_to_sequence (output_reload_insns[reload_opnum[j]]);

	  /* Determine the mode to reload in.
	     See comments above (for input reloading).  */

	  mode = GET_MODE (old);
	  if (mode == VOIDmode)
	    {
	      /* VOIDmode should never happen for an output.  */
	      if (asm_noperands (PATTERN (insn)) < 0)
		/* It's the compiler's fault.  */
		fatal_insn ("VOIDmode on an output", insn);
	      error_for_asm (insn, "output operand is constant in `asm'");
	      /* Prevent crash--use something we know is valid.  */
	      old = gen_rtx_REG (mode, REGNO (reloadreg));
	    }

	  if (GET_MODE (reloadreg) != mode)
	    reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));

#ifdef SECONDARY_OUTPUT_RELOAD_CLASS

	  /* If we need two reload regs, set RELOADREG to the intermediate
	     one, since it will be stored into OLD.  We might need a secondary
	     register only for an input reload, so check again here.  */

	  if (reload_secondary_out_reload[j] >= 0)
	    {
	      if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
		  && reg_equiv_mem[REGNO (old)] != 0)
		real_old = reg_equiv_mem[REGNO (old)];

	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
		{
		  second_reloadreg = reloadreg;
		  reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];

		  /* See if RELOADREG is to be used as a scratch register
		     or as an intermediate register.  */
		  if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
		    {
		      emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
				  (real_old, second_reloadreg, reloadreg)));
		    }

		      /* See if we need both a scratch and intermediate reload
			 register.  */
		      int secondary_reload = reload_secondary_out_reload[j];
		      enum insn_code tertiary_icode
			= reload_secondary_out_icode[secondary_reload];

		      if (GET_MODE (reloadreg) != mode)
			reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));

		      if (tertiary_icode != CODE_FOR_nothing)
			{
			  rtx third_reloadreg
			    = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];

			  /* Copy primary reload reg to secondary reload reg.
			     (Note that these have been swapped above), then
			     secondary reload reg to OLD using our insn.  */

			  /* If REAL_OLD is a paradoxical SUBREG, remove it
			     and try to put the opposite SUBREG on
			     RELOADREG.  */
			  if (GET_CODE (real_old) == SUBREG
			      && (GET_MODE_SIZE (GET_MODE (real_old))
				  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
			      && 0 != (tem = gen_lowpart_common
				       (GET_MODE (SUBREG_REG (real_old)),
			    real_old = SUBREG_REG (real_old), reloadreg = tem;

			  gen_reload (reloadreg, second_reloadreg,
				      reload_opnum[j], reload_when_needed[j]);
			  emit_insn ((GEN_FCN (tertiary_icode)
				      (real_old, reloadreg, third_reloadreg)));
			}

			  /* Copy between the reload regs here and then to
			     OLD later.  */

			  gen_reload (reloadreg, second_reloadreg,
				      reload_opnum[j], reload_when_needed[j]);
          /* Output the last reload insn.  */

          /* Don't output the last reload if OLD is not the dest of
             INSN and is in the src and is clobbered by INSN.  */
          if (! flag_expensive_optimizations
              || GET_CODE (old) != REG
              || ! (set = single_set (insn))
              || rtx_equal_p (old, SET_DEST (set))
              || ! reg_mentioned_p (old, SET_SRC (set))
              || ! regno_clobbered_p (REGNO (old), insn))
            gen_reload (old, reloadreg, reload_opnum[j],
                        reload_when_needed[j]);
7136 #ifdef PRESERVE_DEATH_INFO_REGNO_P
7137 /* If final will look at death notes for this reg,
7138 put one on the last output-reload insn to use it. Similarly
7139 for any secondary register. */
7140 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg
)))
7141 for (p
= get_last_insn (); p
; p
= PREV_INSN (p
))
7142 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
7143 && reg_overlap_mentioned_for_reload_p (reloadreg
,
7145 REG_NOTES (p
) = gen_rtx_EXPR_LIST (REG_DEAD
,
7146 reloadreg
, REG_NOTES (p
));
7148 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7149 if (! special
&& second_reloadreg
7150 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg
)))
7151 for (p
= get_last_insn (); p
; p
= PREV_INSN (p
))
7152 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
7153 && reg_overlap_mentioned_for_reload_p (second_reloadreg
,
7155 REG_NOTES (p
) = gen_rtx_EXPR_LIST (REG_DEAD
,
7160 /* Look at all insns we emitted, just to be safe. */
7161 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
7162 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i')
7164 rtx pat
= PATTERN (p
);
7166 /* If this output reload doesn't come from a spill reg,
7167 clear any memory of reloaded copies of the pseudo reg.
7168 If this output reload comes from a spill reg,
7169 reg_has_output_reload will make this do nothing. */
7170 note_stores (pat
, forget_old_reloads_1
);
7172 if (reg_mentioned_p (reload_reg_rtx
[j
], pat
))
7174 if (reload_spill_index
[j
] < 0
7175 && GET_CODE (pat
) == SET
7176 && SET_SRC (pat
) == reload_reg_rtx
[j
])
7178 int src
= REGNO (SET_SRC (pat
));
7180 reload_spill_index
[j
] = src
;
7181 SET_HARD_REG_BIT (reg_is_output_reload
, src
);
7182 if (find_regno_note (insn
, REG_DEAD
, src
))
7183 SET_HARD_REG_BIT (reg_reloaded_died
, src
);
7185 if (reload_spill_index
[j
] >= 0)
7187 int s
= reload_secondary_out_reload
[j
];
7188 rtx set
= single_set (p
);
7189 /* If this reload copies only to the secondary reload
7190 register, the secondary reload does the actual
7192 if (s
>= 0 && set
== NULL_RTX
)
7193 ; /* We can't tell what function the secondary reload
7194 has and where the actual store to the pseudo is
7195 made; leave new_spill_reg_store alone. */
7197 && SET_SRC (set
) == reload_reg_rtx
[j
]
7198 && SET_DEST (set
) == reload_reg_rtx
[s
])
7200 /* Usually the next instruction will be the
7201 secondary reload insn; if we can confirm
7202 that it is, setting new_spill_reg_store to
7203 that insn will allow an extra optimization. */
7204 rtx s_reg
= reload_reg_rtx
[s
];
7205 rtx next
= NEXT_INSN (p
);
7206 reload_out
[s
] = reload_out
[j
];
7207 set
= single_set (next
);
7208 if (set
&& SET_SRC (set
) == s_reg
7209 && ! new_spill_reg_store
[REGNO (s_reg
)])
7210 new_spill_reg_store
[REGNO (s_reg
)] = next
;
7213 new_spill_reg_store
[reload_spill_index
[j
]] = p
;
7218 if (reload_when_needed
[j
] == RELOAD_OTHER
)
7220 emit_insns (other_output_reload_insns
[reload_opnum
[j
]]);
7221 other_output_reload_insns
[reload_opnum
[j
]] = get_insns ();
7224 output_reload_insns
[reload_opnum
[j
]] = get_insns ();
  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */
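
  /* Illustrative example (added for exposition; not part of the original
     source): for an insn whose operand 0 needs both an input and an output
     reload, the stream built here looks roughly like

         <RELOAD_FOR_OTHER_ADDRESS reloads>
         <RELOAD_OTHER input reloads>
         <input address reloads and RELOAD_FOR_INPUT reload for operand 0>
         <operand address reloads>
         INSN
         <output address reloads and RELOAD_FOR_OUTPUT reload for operand 0>
         <RELOAD_OTHER output reloads for operand 0>

     which is the order produced by the emit_insns_before calls below.  */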
  emit_insns_before (other_input_address_reload_insns, before_insn);
  emit_insns_before (other_input_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
      emit_insns_before (input_address_reload_insns[j], before_insn);
      emit_insns_before (input_reload_insns[j], before_insn);
    }

  emit_insns_before (other_operand_reload_insns, before_insn);
  emit_insns_before (operand_reload_insns, before_insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insns_before (outaddr_address_reload_insns[j], following_insn);
      emit_insns_before (output_address_reload_insns[j], following_insn);
      emit_insns_before (output_reload_insns[j], following_insn);
      emit_insns_before (other_output_reload_insns[j], following_insn);
    }
  /* Move death notes from INSN
     to output-operand-address and output reload insns.  */
#ifdef PRESERVE_DEATH_INFO_REGNO_P
7280 /* Loop over those insns, last ones first. */
7281 for (insn1
= PREV_INSN (following_insn
); insn1
!= insn
;
7282 insn1
= PREV_INSN (insn1
))
7283 if (GET_CODE (insn1
) == INSN
&& GET_CODE (PATTERN (insn1
)) == SET
)
7285 rtx source
= SET_SRC (PATTERN (insn1
));
7286 rtx dest
= SET_DEST (PATTERN (insn1
));
7288 /* The note we will examine next. */
7289 rtx reg_notes
= REG_NOTES (insn
);
7290 /* The place that pointed to this note. */
7291 rtx
*prev_reg_note
= ®_NOTES (insn
);
7293 /* If the note is for something used in the source of this
7294 reload insn, or in the output address, move the note. */
7297 rtx next_reg_notes
= XEXP (reg_notes
, 1);
7298 if (REG_NOTE_KIND (reg_notes
) == REG_DEAD
7299 && GET_CODE (XEXP (reg_notes
, 0)) == REG
7300 && ((GET_CODE (dest
) != REG
7301 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes
, 0),
7303 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes
, 0),
7306 *prev_reg_note
= next_reg_notes
;
7307 XEXP (reg_notes
, 1) = REG_NOTES (insn1
);
7308 REG_NOTES (insn1
) = reg_notes
;
7311 prev_reg_note
= &XEXP (reg_notes
, 1);
7313 reg_notes
= next_reg_notes
;
  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */
7326 for (j
= 0; j
< n_reloads
; j
++)
7328 register int r
= reload_order
[j
];
7329 register int i
= reload_spill_index
[r
];
      /* I is nonneg if this reload used a register.
         If reload_reg_rtx[r] is 0, this is an optional reload
         that we opted to ignore.  */
7335 if (i
>= 0 && reload_reg_rtx
[r
] != 0)
7338 = HARD_REGNO_NREGS (i
, GET_MODE (reload_reg_rtx
[r
]));
7340 int part_reaches_end
= 0;
7341 int all_reaches_end
= 1;
7343 /* For a multi register reload, we need to check if all or part
7344 of the value lives to the end. */
7345 for (k
= 0; k
< nr
; k
++)
7347 if (reload_reg_reaches_end_p (i
+ k
, reload_opnum
[r
],
7348 reload_when_needed
[r
]))
7349 part_reaches_end
= 1;
7351 all_reaches_end
= 0;
7354 /* Ignore reloads that don't reach the end of the insn in
7356 if (all_reaches_end
)
7358 /* First, clear out memory of what used to be in this spill reg.
7359 If consecutive registers are used, clear them all. */
7361 for (k
= 0; k
< nr
; k
++)
7362 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7364 /* Maybe the spill reg contains a copy of reload_out. */
7365 if (reload_out
[r
] != 0 && GET_CODE (reload_out
[r
]) == REG
)
7367 register int nregno
= REGNO (reload_out
[r
]);
7368 int nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
7369 : HARD_REGNO_NREGS (nregno
,
7370 GET_MODE (reload_reg_rtx
[r
])));
7372 spill_reg_store
[i
] = new_spill_reg_store
[i
];
7373 reg_last_reload_reg
[nregno
] = reload_reg_rtx
[r
];
7375 /* If NREGNO is a hard register, it may occupy more than
7376 one register. If it does, say what is in the
7377 rest of the registers assuming that both registers
7378 agree on how many words the object takes. If not,
7379 invalidate the subsequent registers. */
7381 if (nregno
< FIRST_PSEUDO_REGISTER
)
7382 for (k
= 1; k
< nnr
; k
++)
7383 reg_last_reload_reg
[nregno
+ k
]
7385 ? gen_rtx_REG (reg_raw_mode
[REGNO (reload_reg_rtx
[r
]) + k
],
7386 REGNO (reload_reg_rtx
[r
]) + k
)
7389 /* Now do the inverse operation. */
7390 for (k
= 0; k
< nr
; k
++)
7392 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, i
+ k
);
7393 reg_reloaded_contents
[i
+ k
]
7394 = (nregno
>= FIRST_PSEUDO_REGISTER
|| nr
!= nnr
7397 reg_reloaded_insn
[i
+ k
] = insn
;
7398 SET_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7402 /* Maybe the spill reg contains a copy of reload_in. Only do
7403 something if there will not be an output reload for
7404 the register being reloaded. */
7405 else if (reload_out
[r
] == 0
7406 && reload_in
[r
] != 0
7407 && spill_reg_order
[i
] >= 0
7408 && ((GET_CODE (reload_in
[r
]) == REG
7409 && ! reg_has_output_reload
[REGNO (reload_in
[r
])])
7410 || (GET_CODE (reload_in_reg
[r
]) == REG
7411 && ! reg_has_output_reload
[REGNO (reload_in_reg
[r
])])))
7413 register int nregno
;
7416 if (GET_CODE (reload_in
[r
]) == REG
)
7417 nregno
= REGNO (reload_in
[r
]);
7419 nregno
= REGNO (reload_in_reg
[r
]);
7421 nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
7422 : HARD_REGNO_NREGS (nregno
,
7423 GET_MODE (reload_reg_rtx
[r
])));
7425 reg_last_reload_reg
[nregno
] = reload_reg_rtx
[r
];
7427 if (nregno
< FIRST_PSEUDO_REGISTER
)
7428 for (k
= 1; k
< nnr
; k
++)
7429 reg_last_reload_reg
[nregno
+ k
]
7431 ? gen_rtx_REG (reg_raw_mode
[REGNO (reload_reg_rtx
[r
]) + k
],
7432 REGNO (reload_reg_rtx
[r
]) + k
)
7435 /* Unless we inherited this reload, show we haven't
7436 recently done a store. */
7437 if (! reload_inherited
[r
])
7438 spill_reg_store
[i
] = 0;
7440 for (k
= 0; k
< nr
; k
++)
7442 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, i
+ k
);
7443 reg_reloaded_contents
[i
+ k
]
7444 = (nregno
>= FIRST_PSEUDO_REGISTER
|| nr
!= nnr
7447 reg_reloaded_insn
[i
+ k
] = insn
;
7448 SET_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7453 /* However, if part of the reload reaches the end, then we must
7454 invalidate the old info for the part that survives to the end. */
7455 else if (part_reaches_end
)
7457 for (k
= 0; k
< nr
; k
++)
7458 if (reload_reg_reaches_end_p (i
+ k
,
7460 reload_when_needed
[r
]))
7461 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
      /* The following if-statement was #if 0'd in 1.34 (or before...).
         It's reenabled in 1.35 because supposedly nothing else
         deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
         that invalidates any previous reloaded copy of it.
         But forget_old_reloads_1 won't get to see it, because
         it thinks only about the original insn.  So invalidate it here.  */
7473 if (i
< 0 && reload_out
[r
] != 0 && GET_CODE (reload_out
[r
]) == REG
)
7475 register int nregno
= REGNO (reload_out
[r
]);
7476 if (nregno
>= FIRST_PSEUDO_REGISTER
)
7477 reg_last_reload_reg
[nregno
] = 0;
7480 int num_regs
= HARD_REGNO_NREGS (nregno
,GET_MODE (reload_out
[r
]));
7482 while (num_regs
-- > 0)
7483 reg_last_reload_reg
[nregno
+ num_regs
] = 0;
7487 IOR_HARD_REG_SET (reg_reloaded_dead
, reg_reloaded_died
);
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  rtx last = get_last_insn ();
  rtx tem;
  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
           && (GET_MODE_SIZE (GET_MODE (out))
               > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
           && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;
  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */
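
  /* Illustrative example (added for exposition; not part of the original
     source): after frame pointer elimination we can be asked to reload an
     address such as

         (set (reg:SI 3) (plus:SI (reg:SI 7) (const_int 100)))

     where register 3 stands for the reload register and register 7 for the
     stack pointer (both numbers are made up).  The code below first emits
     that single add and keeps it if the target recognizes it; otherwise it
     falls back to the two-insn sequence

         (set (reg:SI 3) (const_int 100))
         (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 7)))

     using a plain move for the first step and gen_add2_insn for the
     second.  */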
  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
          || GET_CODE (XEXP (in, 0)) == SUBREG
          || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
          || GET_CODE (XEXP (in, 1)) == SUBREG
          || CONSTANT_P (XEXP (in, 1))
          || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
         register, constant, or MEM, and put it into the reload
         register.  The best possible way of doing this is if the machine
         has a three-operand ADD insn that accepts the required operands.

         The simplest approach is to try to generate such an insn and see if it
         is recognized and matches its constraints.  If so, it can be used.

         It might be better not to actually emit the insn unless it is valid,
         but we need to pass the insn as an operand to `recog' and
         `insn_extract' and it is simpler to emit and then delete the insn if
         not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
         checked, so we need to do that here to avoid spurious failure
         if the add instruction is two-address and the second operand
         of the add is the same as the reload reg, which is frequently
         the case.  If the insn would be A = B + A, rearrange it so
         it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
          && REGNO (out) == REGNO (XEXP (in, 1)))
        tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
        in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
        {
          insn_extract (insn);
          /* We want constrain operands to treat this insn strictly in
             its validity determination, i.e., the way it would after reload
             has completed.  */
          if (constrain_operands (code, 1))
            return insn;
        }

      delete_insns_since (last);
      /* If that failed, we must use a conservative two-insn sequence.
         Use a move to copy the constant, MEM, or pseudo register to the
         reload register, since "move" will be able to handle an arbitrary
         operand, unlike add which can't, in general.  Then add the
         registers.

         If there is another way to do this for a specific machine, a
         DEFINE_PEEPHOLE should be specified that recognizes the sequence
         we emit below.  */
      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
          || (GET_CODE (op1) == REG
              && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
        tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);
      /* If OP0 and OP1 are the same, we can use OUT for OP1.
         This fixes a problem on the 32K where the stack pointer cannot
         be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
        op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
         Then add the constant to the reload register.  */
      code = recog_memoized (insn);

      if (code >= 0)
        {
          insn_extract (insn);
          /* We want constrain operands to treat this insn strictly in
             its validity determination, i.e., the way it would after reload
             has completed.  */
          if (constrain_operands (code, 1))
            {
              /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
              REG_NOTES (insn)
                = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
              return insn;
            }
        }

      delete_insns_since (last);

      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
    }
#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
           && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
           && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
                                       REGNO_REG_CLASS (REGNO (out)),
                                       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
        out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
        in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
7720 /* If the pseudo-reg we are reloading is no longer referenced
7721 anywhere between the store into it and here,
7722 and no jumps or labels intervene, then the value can get
7723 here through the reload reg alone.
7724 Otherwise, give up--return. */
7725 for (i1
= NEXT_INSN (output_reload_insn
);
7726 i1
!= insn
; i1
= NEXT_INSN (i1
))
7728 if (GET_CODE (i1
) == CODE_LABEL
|| GET_CODE (i1
) == JUMP_INSN
)
7730 if ((GET_CODE (i1
) == INSN
|| GET_CODE (i1
) == CALL_INSN
)
7731 && reg_mentioned_p (reg
, PATTERN (i1
)))
        /* If this is just a single USE with a REG_EQUAL note in front
           of INSN, this is no problem, because this mentions just the
           address that we are using here.
           But if there is more than one such USE, the insn might use
           the operand directly, or another reload might do that.
           This is analogous to the count_occurrences check in the callers.  */
7739 int num_occurences
= 0;
7741 while (GET_CODE (i1
) == INSN
&& GET_CODE (PATTERN (i1
)) == USE
7742 && find_reg_note (i1
, REG_EQUAL
, NULL_RTX
))
7744 num_occurences
+= rtx_equal_p (reg
, XEXP (PATTERN (i1
), 0)) != 0;
7745 i1
= NEXT_INSN (i1
);
7747 if (num_occurences
== 1 && i1
== insn
)
  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.
     But in some cases we can improve the debugging information without
     sacrificing optimization - maybe even improving the code:
     See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  if (reload_out[j] != reload_in[j]
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
7769 /* We know that it was used only between here
7770 and the beginning of the current basic block.
7771 (We also know that the last use before INSN was
7772 the output reload we are thinking of deleting, but never mind that.)
7773 Search that range; see if any ref remains. */
7774 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
7776 rtx set
= single_set (i2
);
7778 /* Uses which just store in the pseudo don't count,
7779 since if they are the only uses, they are dead. */
7780 if (set
!= 0 && SET_DEST (set
) == reg
)
7782 if (GET_CODE (i2
) == CODE_LABEL
7783 || GET_CODE (i2
) == JUMP_INSN
)
7785 if ((GET_CODE (i2
) == INSN
|| GET_CODE (i2
) == CALL_INSN
)
7786 && reg_mentioned_p (reg
, PATTERN (i2
)))
7788 /* Some other ref remains; just delete the output reload we
7790 delete_insn (output_reload_insn
);
7795 /* Delete the now-dead stores into this pseudo. */
7796 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
7798 rtx set
= single_set (i2
);
7800 if (set
!= 0 && SET_DEST (set
) == reg
)
7802 /* This might be a basic block head,
7803 thus don't use delete_insn. */
7804 PUT_CODE (i2
, NOTE
);
7805 NOTE_SOURCE_FILE (i2
) = 0;
7806 NOTE_LINE_NUMBER (i2
) = NOTE_INSN_DELETED
;
7808 if (GET_CODE (i2
) == CODE_LABEL
7809 || GET_CODE (i2
) == JUMP_INSN
)
7813 /* For the debugging info,
7814 say the pseudo lives in this reload reg. */
7815 reg_renumber
[REGNO (reg
)] = REGNO (reload_reg_rtx
[j
]);
7816 alter_reg (REGNO (reg
), -1);
7818 delete_insn (output_reload_insn
);
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
7836 /* REG or MEM to be copied and incremented. */
7837 rtx incloc
= XEXP (value
, 0);
7838 /* Nonzero if increment after copying. */
7839 int post
= (GET_CODE (value
) == POST_DEC
|| GET_CODE (value
) == POST_INC
);
7845 /* No hard register is equivalent to this register after
7846 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7847 we could inc/dec that register as well (maybe even using it for
7848 the source), but I'm not sure it's worth worrying about. */
7849 if (GET_CODE (incloc
) == REG
)
7850 reg_last_reload_reg
[REGNO (incloc
)] = 0;
7852 if (GET_CODE (value
) == PRE_DEC
|| GET_CODE (value
) == POST_DEC
)
7853 inc_amount
= - inc_amount
;
7855 inc
= GEN_INT (inc_amount
);
7857 /* If this is post-increment, first copy the location to the reload reg. */
7859 emit_insn (gen_move_insn (reloadreg
, incloc
));
7861 /* See if we can directly increment INCLOC. Use a method similar to that
7864 last
= get_last_insn ();
7865 add_insn
= emit_insn (gen_rtx_SET (VOIDmode
, incloc
,
7866 gen_rtx_PLUS (GET_MODE (incloc
),
7869 code
= recog_memoized (add_insn
);
7872 insn_extract (add_insn
);
7873 if (constrain_operands (code
, 1))
7875 /* If this is a pre-increment and we have incremented the value
7876 where it lives, copy the incremented value to RELOADREG to
7877 be used as an address. */
7880 emit_insn (gen_move_insn (reloadreg
, incloc
));
7886 delete_insns_since (last
);
7888 /* If couldn't do the increment directly, must increment in RELOADREG.
7889 The way we do this depends on whether this is pre- or post-increment.
7890 For pre-increment, copy INCLOC to the reload register, increment it
7891 there, then save back. */
7895 emit_insn (gen_move_insn (reloadreg
, incloc
));
7896 emit_insn (gen_add2_insn (reloadreg
, inc
));
7897 emit_insn (gen_move_insn (incloc
, reloadreg
));
      /* Because this might be a jump insn or a compare, and because RELOADREG
         may not be available after the insn in an input reload, we must do
         the incrementation before the insn being reloaded for.

         We have already copied INCLOC to RELOADREG.  Increment the copy in
         RELOADREG, save that back, then decrement RELOADREG so it has
         the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
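
/* Illustrative example (added for exposition; not part of the original
   source): reloading VALUE == (post_inc:SI (reg:SI 2)) with INC_AMOUNT == 4
   into RELOADREG, on a target where register 2 cannot be incremented
   directly, emits roughly

        reloadreg <- (reg 2)            copy the value the insn should use
        reloadreg <- reloadreg + 4
        (reg 2)   <- reloadreg          store back the incremented value
        reloadreg <- reloadreg - 4      restore the pre-increment value

   as described in the pre/post-increment comments above.  Register number 2
   is made up.  */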
/* Return 1 if we are certain that the constraint-string STRING allows
   the hard register REG.  Return 0 if we can't be sure of this.  */

static int
constraint_accepts_reg_p (string, reg)
     char *string;
     rtx reg;
{
  int regno = true_regnum (reg);
7930 /* Initialize for first alternative. */
7932 /* Check that each alternative contains `g' or `r'. */
7934 switch (c
= *string
++)
7937 /* If an alternative lacks `g' or `r', we lose. */
7940 /* If an alternative lacks `g' or `r', we lose. */
7943 /* Initialize for next alternative. */
7948 /* Any general reg wins for this alternative. */
7949 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) GENERAL_REGS
], regno
))
7953 /* Any reg in specified class wins for this alternative. */
7955 enum reg_class
class = REG_CLASS_FROM_LETTER (c
);
7957 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], regno
))
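
/* Illustrative example (added for exposition; not part of the original
   source): constraint_accepts_reg_p only answers "yes" when every
   alternative is certain to take REG.  For STRING == "r,g" any general
   register satisfies both alternatives, so the result is 1; for
   STRING == "r,m" the second alternative may demand a memory operand,
   so the result is 0 even though the first alternative would accept REG.  */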
/* Return the number of places FIND appears within X, but don't count
   an occurrence if some SET_DEST is FIND.  */

int
count_occurrences (x, find)
     register rtx x, find;
{
  register int i, j;
  register enum rtx_code code;
  register char *format_ptr;
  int count;
7980 code
= GET_CODE (x
);
7995 if (SET_DEST (x
) == find
)
7996 return count_occurrences (SET_SRC (x
), find
);
8003 format_ptr
= GET_RTX_FORMAT (code
);
8006 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
8008 switch (*format_ptr
++)
8011 count
+= count_occurrences (XEXP (x
, i
), find
);
8015 if (XVEC (x
, i
) != NULL
)
8017 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
8018 count
+= count_occurrences (XVECEXP (x
, i
, j
), find
);
/* This array holds values which are equivalent to a hard register
   during reload_cse_regs.  Each array element is an EXPR_LIST of
   values.  Each time a hard register is set, we set the corresponding
   array element to the value.  Each time a hard register is copied
   into memory, we add the memory location to the corresponding array
   element.  We don't store values or memory addresses with side
   effects in this array.

   If the value is a CONST_INT, then the mode of the containing
   EXPR_LIST is the mode in which that CONST_INT was referenced.

   We sometimes clobber a specific entry in a list.  In that case, we
   just set XEXP (list-entry, 0) to 0.  */

static rtx *reg_values;

/* This is a preallocated REG rtx which we use as a temporary in
   reload_cse_invalidate_regno, so that we don't need to allocate a
   new one each time through a loop in that function.  */

static rtx invalidate_regno_rtx;

/* This is a set of registers for which we must remove REG_DEAD notes in
   previous insns, because our modifications made them invalid.  That can
   happen if we introduced the register into the current insn, or we deleted
   the current insn which used to set the register.  */

static HARD_REG_SET no_longer_dead_regs;
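
/* Illustrative example (added for exposition; not part of the original
   source): after the two insns

        (set (reg:SI 1) (mem:SI (reg:SI 6)))
        (set (mem:SI (symbol_ref "x")) (reg:SI 1))

   reg_values[1] lists both (mem:SI (reg:SI 6)) and (mem:SI (symbol_ref "x")).
   A later load of either location can then be rewritten as a copy from hard
   register 1, and a later store of register 1 into either location can be
   recognized as a no-op, until a store or clobber invalidates the entries.
   The register numbers and symbol are made up.  */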
/* Invalidate any entries in reg_values which depend on REGNO,
   including those for REGNO itself.  This is called if REGNO is
   changing.  If CLOBBER is true, then always forget anything we
   currently know about REGNO.  MODE is the mode of the assignment to
   REGNO, which is used to determine how many hard registers are being
   changed.  If MODE is VOIDmode, then only REGNO is being changed;
   this is used when invalidating call clobbered registers across a
   call.  */
8065 reload_cse_invalidate_regno (regno
, mode
, clobber
)
8067 enum machine_mode mode
;
8073 /* Our callers don't always go through true_regnum; we may see a
8074 pseudo-register here from a CLOBBER or the like. We probably
8075 won't ever see a pseudo-register that has a real register number,
8076 for we check anyhow for safety. */
8077 if (regno
>= FIRST_PSEUDO_REGISTER
)
8078 regno
= reg_renumber
[regno
];
8082 if (mode
== VOIDmode
)
8083 endregno
= regno
+ 1;
8085 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
8088 for (i
= regno
; i
< endregno
; i
++)
8091 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8095 for (x
= reg_values
[i
]; x
; x
= XEXP (x
, 1))
8097 if (XEXP (x
, 0) != 0
8098 && refers_to_regno_p (regno
, endregno
, XEXP (x
, 0), NULL_PTR
))
8100 /* If this is the only entry on the list, clear
8101 reg_values[i]. Otherwise, just clear this entry on
8103 if (XEXP (x
, 1) == 0 && x
== reg_values
[i
])
8113 /* We must look at earlier registers, in case REGNO is part of a
8114 multi word value but is not the first register. If an earlier
8115 register has a value in a mode which overlaps REGNO, then we must
8116 invalidate that earlier register. Note that we do not need to
8117 check REGNO or later registers (we must not check REGNO itself,
8118 because we would incorrectly conclude that there was a conflict). */
8120 for (i
= 0; i
< regno
; i
++)
8124 for (x
= reg_values
[i
]; x
; x
= XEXP (x
, 1))
8126 if (XEXP (x
, 0) != 0)
8128 PUT_MODE (invalidate_regno_rtx
, GET_MODE (x
));
8129 REGNO (invalidate_regno_rtx
) = i
;
8130 if (refers_to_regno_p (regno
, endregno
, invalidate_regno_rtx
,
8133 reload_cse_invalidate_regno (i
, VOIDmode
, 1);
8141 /* The memory at address MEM_BASE is being changed.
8142 Return whether this change will invalidate VAL. */
8145 reload_cse_mem_conflict_p (mem_base
, val
)
8153 code
= GET_CODE (val
);
8156 /* Get rid of a few simple cases quickly. */
8169 if (GET_MODE (mem_base
) == BLKmode
8170 || GET_MODE (val
) == BLKmode
)
8172 if (anti_dependence (val
, mem_base
))
8174 /* The address may contain nested MEMs. */
8181 fmt
= GET_RTX_FORMAT (code
);
8183 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
8187 if (reload_cse_mem_conflict_p (mem_base
, XEXP (val
, i
)))
8190 else if (fmt
[i
] == 'E')
8194 for (j
= 0; j
< XVECLEN (val
, i
); j
++)
8195 if (reload_cse_mem_conflict_p (mem_base
, XVECEXP (val
, i
, j
)))
8203 /* Invalidate any entries in reg_values which are changed because of a
8204 store to MEM_RTX. If this is called because of a non-const call
8205 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8208 reload_cse_invalidate_mem (mem_rtx
)
8213 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8217 for (x
= reg_values
[i
]; x
; x
= XEXP (x
, 1))
8219 if (XEXP (x
, 0) != 0
8220 && reload_cse_mem_conflict_p (mem_rtx
, XEXP (x
, 0)))
8222 /* If this is the only entry on the list, clear
8223 reg_values[i]. Otherwise, just clear this entry on
8225 if (XEXP (x
, 1) == 0 && x
== reg_values
[i
])
8236 /* Invalidate DEST, which is being assigned to or clobbered. The
8237 second parameter exists so that this function can be passed to
8238 note_stores; it is ignored. */
8241 reload_cse_invalidate_rtx (dest
, ignore
)
8243 rtx ignore ATTRIBUTE_UNUSED
;
8245 while (GET_CODE (dest
) == STRICT_LOW_PART
8246 || GET_CODE (dest
) == SIGN_EXTRACT
8247 || GET_CODE (dest
) == ZERO_EXTRACT
8248 || GET_CODE (dest
) == SUBREG
)
8249 dest
= XEXP (dest
, 0);
8251 if (GET_CODE (dest
) == REG
)
8252 reload_cse_invalidate_regno (REGNO (dest
), GET_MODE (dest
), 1);
8253 else if (GET_CODE (dest
) == MEM
)
8254 reload_cse_invalidate_mem (dest
);
8257 /* Possibly delete death notes on the insns before INSN if modifying INSN
8258 extended the lifespan of the registers. */
8261 reload_cse_delete_death_notes (insn
)
8266 for (dreg
= 0; dreg
< FIRST_PSEUDO_REGISTER
; dreg
++)
8270 if (! TEST_HARD_REG_BIT (no_longer_dead_regs
, dreg
))
8273 for (trial
= prev_nonnote_insn (insn
);
8275 && GET_CODE (trial
) != CODE_LABEL
8276 && GET_CODE (trial
) != BARRIER
);
8277 trial
= prev_nonnote_insn (trial
))
8279 if (find_regno_note (trial
, REG_DEAD
, dreg
))
8281 remove_death (dreg
, trial
);
8288 /* Record that the current insn uses hard reg REGNO in mode MODE. This
8289 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
8290 notes for this register. */
8293 reload_cse_no_longer_dead (regno
, mode
)
8295 enum machine_mode mode
;
8297 int nregs
= HARD_REGNO_NREGS (regno
, mode
);
8300 SET_HARD_REG_BIT (no_longer_dead_regs
, regno
);
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */
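
/* Illustrative example (added for exposition; not part of the original
   source): if pseudos that were assigned the same hard register are copied
   into each other, reload leaves behind an insn like

        (set (reg:SI 2) (reg:SI 2))

   which the pass below turns into a NOTE_INSN_DELETED note.  Similarly, if
   two hard registers are loaded from the same unchanged memory location and
   a register copy is cheaper than the load, the second load is rewritten as
   a register-register move.  Register number 2 is made up.  */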
8324 reload_cse_regs (first
)
8332 init_alias_analysis ();
8334 reg_values
= (rtx
*) alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
8335 bzero ((char *)reg_values
, FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
8337 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8338 free them when we are done. */
8339 push_obstacks (&reload_obstack
, &reload_obstack
);
8340 firstobj
= (char *) obstack_alloc (&reload_obstack
, 0);
8342 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8343 memory for a non-const call instruction. */
8344 callmem
= gen_rtx_MEM (BLKmode
, const0_rtx
);
8346 /* This is used in reload_cse_invalidate_regno to avoid consing a
8347 new REG in a loop in that function. */
8348 invalidate_regno_rtx
= gen_rtx_REG (VOIDmode
, 0);
8350 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
8354 if (GET_CODE (insn
) == CODE_LABEL
)
8356 /* Forget all the register values at a code label. We don't
8357 try to do anything clever around jumps. */
8358 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8364 #ifdef NON_SAVING_SETJMP
8365 if (NON_SAVING_SETJMP
&& GET_CODE (insn
) == NOTE
8366 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
8368 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8375 if (GET_RTX_CLASS (GET_CODE (insn
)) != 'i')
8378 CLEAR_HARD_REG_SET (no_longer_dead_regs
);
8380 /* If this is a call instruction, forget anything stored in a
8381 call clobbered register, or, if this is not a const call, in
8383 if (GET_CODE (insn
) == CALL_INSN
)
8385 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8386 if (call_used_regs
[i
])
8387 reload_cse_invalidate_regno (i
, VOIDmode
, 1);
8389 if (! CONST_CALL_P (insn
))
8390 reload_cse_invalidate_mem (callmem
);
8393 body
= PATTERN (insn
);
8394 if (GET_CODE (body
) == SET
)
8397 if (reload_cse_noop_set_p (body
, insn
))
              /* If this sets the return value of the function, we must keep
                 a USE around, in case this is in a different basic block
                 than the final USE.  Otherwise, we could lose important
                 register liveness information on SMALL_REGISTER_CLASSES
                 machines, where return registers might be used as spills:
                 subsequent passes assume that spill registers are dead at
                 the end of a basic block.  */
8406 if (REG_FUNCTION_VALUE_P (SET_DEST (body
)))
8409 PATTERN (insn
) = gen_rtx_USE (VOIDmode
, SET_DEST (body
));
8410 INSN_CODE (insn
) = -1;
8411 REG_NOTES (insn
) = NULL_RTX
;
8412 push_obstacks (&reload_obstack
, &reload_obstack
);
8416 PUT_CODE (insn
, NOTE
);
8417 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
8418 NOTE_SOURCE_FILE (insn
) = 0;
8420 reload_cse_delete_death_notes (insn
);
8422 /* We're done with this insn. */
8426 /* It's not a no-op, but we can try to simplify it. */
8427 CLEAR_HARD_REG_SET (no_longer_dead_regs
);
8428 count
+= reload_cse_simplify_set (body
, insn
);
8430 if (count
> 0 && apply_change_group ())
8431 reload_cse_delete_death_notes (insn
);
8432 else if (reload_cse_simplify_operands (insn
))
8433 reload_cse_delete_death_notes (insn
);
8435 reload_cse_record_set (body
, body
);
8437 else if (GET_CODE (body
) == PARALLEL
)
8440 rtx value
= NULL_RTX
;
8442 /* If every action in a PARALLEL is a noop, we can delete
8443 the entire PARALLEL. */
8444 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
8446 rtx part
= XVECEXP (body
, 0, i
);
8447 if (GET_CODE (part
) == SET
)
8449 if (! reload_cse_noop_set_p (part
, insn
))
8451 if (REG_FUNCTION_VALUE_P (SET_DEST (part
)))
8455 value
= SET_DEST (part
);
8458 else if (GET_CODE (part
) != CLOBBER
)
8466 PATTERN (insn
) = gen_rtx_USE (VOIDmode
, value
);
8467 INSN_CODE (insn
) = -1;
8468 REG_NOTES (insn
) = NULL_RTX
;
8469 push_obstacks (&reload_obstack
, &reload_obstack
);
8473 PUT_CODE (insn
, NOTE
);
8474 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
8475 NOTE_SOURCE_FILE (insn
) = 0;
8477 reload_cse_delete_death_notes (insn
);
8479 /* We're done with this insn. */
8483 /* It's not a no-op, but we can try to simplify it. */
8484 CLEAR_HARD_REG_SET (no_longer_dead_regs
);
8485 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
8486 if (GET_CODE (XVECEXP (body
, 0, i
)) == SET
)
8487 count
+= reload_cse_simplify_set (XVECEXP (body
, 0, i
), insn
);
8489 if (count
> 0 && apply_change_group ())
8490 reload_cse_delete_death_notes (insn
);
8491 else if (reload_cse_simplify_operands (insn
))
8492 reload_cse_delete_death_notes (insn
);
8494 /* Look through the PARALLEL and record the values being
8495 set, if possible. Also handle any CLOBBERs. */
8496 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
8498 rtx x
= XVECEXP (body
, 0, i
);
8500 if (GET_CODE (x
) == SET
)
8501 reload_cse_record_set (x
, body
);
8503 note_stores (x
, reload_cse_invalidate_rtx
);
8507 note_stores (body
, reload_cse_invalidate_rtx
);
8510 /* Clobber any registers which appear in REG_INC notes. We
8511 could keep track of the changes to their values, but it is
8512 unlikely to help. */
8516 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
8517 if (REG_NOTE_KIND (x
) == REG_INC
)
8518 reload_cse_invalidate_rtx (XEXP (x
, 0), NULL_RTX
);
8522 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8523 after we have processed the insn. */
8524 if (GET_CODE (insn
) == CALL_INSN
)
8528 for (x
= CALL_INSN_FUNCTION_USAGE (insn
); x
; x
= XEXP (x
, 1))
8529 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
8530 reload_cse_invalidate_rtx (XEXP (XEXP (x
, 0), 0), NULL_RTX
);
8534 /* Free all the temporary structures we created, and go back to the
8535 regular obstacks. */
8536 obstack_free (&reload_obstack
, firstobj
);
/* Return whether the values known for REGNO are equal to VAL.  MODE
   is the mode of the object that VAL is being copied to; this matters
   if VAL is a CONST_INT.  */
8545 reload_cse_regno_equal_p (regno
, val
, mode
)
8548 enum machine_mode mode
;
8555 for (x
= reg_values
[regno
]; x
; x
= XEXP (x
, 1))
8556 if (XEXP (x
, 0) != 0
8557 && rtx_equal_p (XEXP (x
, 0), val
)
8558 && (! flag_float_store
|| GET_CODE (XEXP (x
, 0)) != MEM
8559 || GET_MODE_CLASS (GET_MODE (x
)) != MODE_FLOAT
)
8560 && (GET_CODE (val
) != CONST_INT
8561 || mode
== GET_MODE (x
)
8562 || (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (x
))
8563 /* On a big endian machine if the value spans more than
8564 one register then this register holds the high part of
8565 it and we can't use it.
8567 ??? We should also compare with the high part of the
8569 && !(WORDS_BIG_ENDIAN
8570 && HARD_REGNO_NREGS (regno
, GET_MODE (x
)) > 1)
8571 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
8572 GET_MODE_BITSIZE (GET_MODE (x
))))))
/* See whether a single set is a noop.  SET is the set instruction we
   should check, and INSN is the instruction from which it came.  */
8582 reload_cse_noop_set_p (set
, insn
)
8587 enum machine_mode dest_mode
;
8591 src
= SET_SRC (set
);
8592 dest
= SET_DEST (set
);
8593 dest_mode
= GET_MODE (dest
);
8595 if (side_effects_p (src
))
8598 dreg
= true_regnum (dest
);
8599 sreg
= true_regnum (src
);
8601 /* Check for setting a register to itself. In this case, we don't
8602 have to worry about REG_DEAD notes. */
8603 if (dreg
>= 0 && dreg
== sreg
)
8609 /* Check for setting a register to itself. */
8613 /* Check for setting a register to a value which we already know
8614 is in the register. */
8615 else if (reload_cse_regno_equal_p (dreg
, src
, dest_mode
))
8618 /* Check for setting a register DREG to another register SREG
8619 where SREG is equal to a value which is already in DREG. */
8624 for (x
= reg_values
[sreg
]; x
; x
= XEXP (x
, 1))
8628 if (XEXP (x
, 0) == 0)
8631 if (dest_mode
== GET_MODE (x
))
8633 else if (GET_MODE_BITSIZE (dest_mode
)
8634 < GET_MODE_BITSIZE (GET_MODE (x
)))
8635 tmp
= gen_lowpart_common (dest_mode
, XEXP (x
, 0));
8640 && reload_cse_regno_equal_p (dreg
, tmp
, dest_mode
))
8648 else if (GET_CODE (dest
) == MEM
)
8650 /* Check for storing a register to memory when we know that the
8651 register is equivalent to the memory location. */
8653 && reload_cse_regno_equal_p (sreg
, dest
, dest_mode
)
8654 && ! side_effects_p (dest
))
8658 /* If we can delete this SET, then we need to look for an earlier
8659 REG_DEAD note on DREG, and remove it if it exists. */
8660 if (ret
&& dreg
>= 0)
8662 if (! find_regno_note (insn
, REG_UNUSED
, dreg
))
8663 reload_cse_no_longer_dead (dreg
, dest_mode
);
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */
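
/* Illustrative example (added for exposition; not part of the original
   source): given

        (set (reg:SI 0) (mem:SI (symbol_ref "x")))

   when reg_values shows that hard register 3 already holds the value of that
   memory location, the SET_SRC is replaced so the insn becomes

        (set (reg:SI 0) (reg:SI 3))

   provided the memory load is not cheaper and validate_change accepts the
   rewritten insn.  The register numbers and symbol are made up.  */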
8676 reload_cse_simplify_set (set
, insn
)
8682 enum machine_mode dest_mode
;
8683 enum reg_class dclass
;
8686 dreg
= true_regnum (SET_DEST (set
));
8690 src
= SET_SRC (set
);
8691 if (side_effects_p (src
) || true_regnum (src
) >= 0)
8694 dclass
= REGNO_REG_CLASS (dreg
);
8696 /* If memory loads are cheaper than register copies, don't change them. */
8697 if (GET_CODE (src
) == MEM
8698 && MEMORY_MOVE_COST (GET_MODE (src
), dclass
, 1) < 2)
8701 /* If the constant is cheaper than a register, don't change it. */
8702 if (CONSTANT_P (src
)
8703 && rtx_cost (src
, SET
) < 2)
8706 dest_mode
= GET_MODE (SET_DEST (set
));
8707 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8710 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i
), dclass
) == 2
8711 && reload_cse_regno_equal_p (i
, src
, dest_mode
))
8715 /* Pop back to the real obstacks while changing the insn. */
8718 validated
= validate_change (insn
, &SET_SRC (set
),
8719 gen_rtx_REG (dest_mode
, i
), 1);
8721 /* Go back to the obstack we are using for temporary
8723 push_obstacks (&reload_obstack
, &reload_obstack
);
8725 if (validated
&& ! find_regno_note (insn
, REG_UNUSED
, i
))
8727 reload_cse_no_longer_dead (i
, dest_mode
);
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */
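
/* Illustrative example (added for exposition; not part of the original
   source): suppose an operand with constraint "r,m" is currently a constant
   and hard register 4 is known to hold that constant.  The "r" alternative
   can then be satisfied by substituting register 4, so its alternative_nregs
   count goes up, while the "m" alternative gets no credit; a `?' in a
   constraint adds 3 to an alternative's reject weight and a `!' adds 300.
   The alternatives at least as good as the currently matching one are sorted
   on these scores and the operands are rewritten for the winner via
   validate_change.  Register number 4 is made up.  */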
8747 reload_cse_simplify_operands (insn
)
8750 #ifdef REGISTER_CONSTRAINTS
8751 int insn_code_number
, n_operands
, n_alternatives
;
8754 char *constraints
[MAX_RECOG_OPERANDS
];
8756 /* Vector recording how bad an alternative is. */
8757 int *alternative_reject
;
8758 /* Vector recording how many registers can be introduced by choosing
8759 this alternative. */
8760 int *alternative_nregs
;
8761 /* Array of vectors recording, for each operand and each alternative,
8762 which hard register to substitute, or -1 if the operand should be
8764 int *op_alt_regno
[MAX_RECOG_OPERANDS
];
8765 /* Array of alternatives, sorted in order of decreasing desirability. */
8766 int *alternative_order
;
8767 rtx reg
= gen_rtx_REG (VOIDmode
, -1);
8769 /* Find out some information about this insn. */
8770 insn_code_number
= recog_memoized (insn
);
8771 /* We don't modify asm instructions. */
8772 if (insn_code_number
< 0)
8775 n_operands
= insn_n_operands
[insn_code_number
];
8776 n_alternatives
= insn_n_alternatives
[insn_code_number
];
8778 if (n_alternatives
== 0 || n_operands
== 0)
8780 insn_extract (insn
);
8782 /* Figure out which alternative currently matches. */
8783 if (! constrain_operands (insn_code_number
, 1))
8784 fatal_insn_not_found (insn
);
8786 alternative_reject
= (int *) alloca (n_alternatives
* sizeof (int));
8787 alternative_nregs
= (int *) alloca (n_alternatives
* sizeof (int));
8788 alternative_order
= (int *) alloca (n_alternatives
* sizeof (int));
8789 bzero ((char *)alternative_reject
, n_alternatives
* sizeof (int));
8790 bzero ((char *)alternative_nregs
, n_alternatives
* sizeof (int));
8792 for (i
= 0; i
< n_operands
; i
++)
8794 enum machine_mode mode
;
8798 op_alt_regno
[i
] = (int *) alloca (n_alternatives
* sizeof (int));
8799 for (j
= 0; j
< n_alternatives
; j
++)
8800 op_alt_regno
[i
][j
] = -1;
8802 p
= constraints
[i
] = insn_operand_constraint
[insn_code_number
][i
];
8803 mode
= insn_operand_mode
[insn_code_number
][i
];
8805 /* Add the reject values for each alternative given by the constraints
8806 for this operand. */
8814 alternative_reject
[j
] += 3;
8816 alternative_reject
[j
] += 300;
8819 /* We won't change operands which are already registers. We
8820 also don't want to modify output operands. */
8821 regno
= true_regnum (recog_operand
[i
]);
8823 || constraints
[i
][0] == '='
8824 || constraints
[i
][0] == '+')
8827 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
8829 int class = (int) NO_REGS
;
8831 if (! reload_cse_regno_equal_p (regno
, recog_operand
[i
], mode
))
8834 REGNO (reg
) = regno
;
8835 PUT_MODE (reg
, mode
);
8837 /* We found a register equal to this operand. Now look for all
8838 alternatives that can accept this register and have not been
8839 assigned a register they can use yet. */
8848 case '=': case '+': case '?':
8849 case '#': case '&': case '!':
8851 case '0': case '1': case '2': case '3': case '4':
8852 case 'm': case '<': case '>': case 'V': case 'o':
8853 case 'E': case 'F': case 'G': case 'H':
8854 case 's': case 'i': case 'n':
8855 case 'I': case 'J': case 'K': case 'L':
8856 case 'M': case 'N': case 'O': case 'P':
8857 #ifdef EXTRA_CONSTRAINT
8858 case 'Q': case 'R': case 'S': case 'T': case 'U':
8861 /* These don't say anything we care about. */
8865 class = reg_class_subunion
[(int) class][(int) GENERAL_REGS
];
8870 = reg_class_subunion
[(int) class][(int) REG_CLASS_FROM_LETTER (c
)];
8873 case ',': case '\0':
8874 /* See if REGNO fits this alternative, and set it up as the
8875 replacement register if we don't have one for this
8876 alternative yet and the operand being replaced is not
8877 a cheap CONST_INT. */
8878 if (op_alt_regno
[i
][j
] == -1
8879 && reg_fits_class_p (reg
, class, 0, mode
)
8880 && (GET_CODE (recog_operand
[i
]) != CONST_INT
8881 || rtx_cost (recog_operand
[i
], SET
) > rtx_cost (reg
, SET
)))
8883 alternative_nregs
[j
]++;
8884 op_alt_regno
[i
][j
] = regno
;
8896 /* Record all alternatives which are better or equal to the currently
8897 matching one in the alternative_order array. */
8898 for (i
= j
= 0; i
< n_alternatives
; i
++)
8899 if (alternative_reject
[i
] <= alternative_reject
[which_alternative
])
8900 alternative_order
[j
++] = i
;
8903 /* Sort it. Given a small number of alternatives, a dumb algorithm
8904 won't hurt too much. */
8905 for (i
= 0; i
< n_alternatives
- 1; i
++)
8908 int best_reject
= alternative_reject
[alternative_order
[i
]];
8909 int best_nregs
= alternative_nregs
[alternative_order
[i
]];
8912 for (j
= i
+ 1; j
< n_alternatives
; j
++)
8914 int this_reject
= alternative_reject
[alternative_order
[j
]];
8915 int this_nregs
= alternative_nregs
[alternative_order
[j
]];
8917 if (this_reject
< best_reject
8918 || (this_reject
== best_reject
&& this_nregs
< best_nregs
))
8921 best_reject
= this_reject
;
8922 best_nregs
= this_nregs
;
8926 tmp
= alternative_order
[best
];
8927 alternative_order
[best
] = alternative_order
[i
];
8928 alternative_order
[i
] = tmp
;
8931 /* Substitute the operands as determined by op_alt_regno for the best
8933 j
= alternative_order
[0];
8934 CLEAR_HARD_REG_SET (no_longer_dead_regs
);
8936 /* Pop back to the real obstacks while changing the insn. */
8939 for (i
= 0; i
< n_operands
; i
++)
8941 enum machine_mode mode
= insn_operand_mode
[insn_code_number
][i
];
8942 if (op_alt_regno
[i
][j
] == -1)
8945 reload_cse_no_longer_dead (op_alt_regno
[i
][j
], mode
);
8946 validate_change (insn
, recog_operand_loc
[i
],
8947 gen_rtx_REG (mode
, op_alt_regno
[i
][j
]), 1);
8950 for (i
= insn_n_dups
[insn_code_number
] - 1; i
>= 0; i
--)
8952 int op
= recog_dup_num
[i
];
8953 enum machine_mode mode
= insn_operand_mode
[insn_code_number
][op
];
8955 if (op_alt_regno
[op
][j
] == -1)
8958 reload_cse_no_longer_dead (op_alt_regno
[op
][j
], mode
);
8959 validate_change (insn
, recog_dup_loc
[i
],
8960 gen_rtx_REG (mode
, op_alt_regno
[op
][j
]), 1);
8963 /* Go back to the obstack we are using for temporary
8965 push_obstacks (&reload_obstack
, &reload_obstack
);
8967 return apply_change_group ();
/* These two variables are used to pass information from
   reload_cse_record_set to reload_cse_check_clobber.  */

static int reload_cse_check_clobbered;
static rtx reload_cse_check_src;

/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
   RELOAD_CSE_CHECK_CLOBBERED.  This is called via note_stores.  The
   second argument, which is passed by note_stores, is ignored.  */
8984 reload_cse_check_clobber (dest
, ignore
)
8986 rtx ignore ATTRIBUTE_UNUSED
;
8988 if (reg_overlap_mentioned_p (dest
, reload_cse_check_src
))
8989 reload_cse_check_clobbered
= 1;
/* Record the result of a SET instruction.  SET is the set pattern.
   BODY is the pattern of the insn that it came from.  */
8996 reload_cse_record_set (set
, body
)
9002 enum machine_mode dest_mode
;
9004 dest
= SET_DEST (set
);
9005 src
= SET_SRC (set
);
9006 dreg
= true_regnum (dest
);
9007 sreg
= true_regnum (src
);
9008 dest_mode
= GET_MODE (dest
);
  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
9015 while (GET_CODE (x
) == SUBREG
9016 || GET_CODE (x
) == ZERO_EXTRACT
9017 || GET_CODE (x
) == SIGN_EXTRACT
9018 || GET_CODE (x
) == STRICT_LOW_PART
)
9020 if (push_operand (x
, GET_MODE (x
)))
9022 reload_cse_invalidate_rtx (stack_pointer_rtx
, NULL_RTX
);
9023 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9027 /* We can only handle an assignment to a register, or a store of a
9028 register to a memory location. For other cases, we just clobber
9029 the destination. We also have to just clobber if there are side
9030 effects in SRC or DEST. */
9031 if ((dreg
< 0 && GET_CODE (dest
) != MEM
)
9032 || side_effects_p (src
)
9033 || side_effects_p (dest
))
9035 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9040 /* We don't try to handle values involving CC, because it's a pain
9041 to keep track of when they have to be invalidated. */
9042 if (reg_mentioned_p (cc0_rtx
, src
)
9043 || reg_mentioned_p (cc0_rtx
, dest
))
9045 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9050 /* If BODY is a PARALLEL, then we need to see whether the source of
9051 SET is clobbered by some other instruction in the PARALLEL. */
9052 if (GET_CODE (body
) == PARALLEL
)
9056 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
9060 x
= XVECEXP (body
, 0, i
);
9064 reload_cse_check_clobbered
= 0;
9065 reload_cse_check_src
= src
;
9066 note_stores (x
, reload_cse_check_clobber
);
9067 if (reload_cse_check_clobbered
)
9069 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9079 /* This is an assignment to a register. Update the value we
9080 have stored for the register. */
9085 /* This is a copy from one register to another. Any values
9086 which were valid for SREG are now valid for DREG. If the
9087 mode changes, we use gen_lowpart_common to extract only
9088 the part of the value that is copied. */
9089 reg_values
[dreg
] = 0;
9090 for (x
= reg_values
[sreg
]; x
; x
= XEXP (x
, 1))
9094 if (XEXP (x
, 0) == 0)
9096 if (dest_mode
== GET_MODE (XEXP (x
, 0)))
9098 else if (GET_MODE_BITSIZE (dest_mode
)
9099 > GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))))
9102 tmp
= gen_lowpart_common (dest_mode
, XEXP (x
, 0));
9104 reg_values
[dreg
] = gen_rtx_EXPR_LIST (dest_mode
, tmp
,
9109 reg_values
[dreg
] = gen_rtx_EXPR_LIST (dest_mode
, src
, NULL_RTX
);
9111 /* We've changed DREG, so invalidate any values held by other
9112 registers that depend upon it. */
9113 reload_cse_invalidate_regno (dreg
, dest_mode
, 0);
9115 /* If this assignment changes more than one hard register,
9116 forget anything we know about the others. */
9117 for (i
= 1; i
< HARD_REGNO_NREGS (dreg
, dest_mode
); i
++)
9118 reg_values
[dreg
+ i
] = 0;
9120 else if (GET_CODE (dest
) == MEM
)
9122 /* Invalidate conflicting memory locations. */
9123 reload_cse_invalidate_mem (dest
);
9125 /* If we're storing a register to memory, add DEST to the list
9127 if (sreg
>= 0 && ! side_effects_p (dest
))
9128 reg_values
[sreg
] = gen_rtx_EXPR_LIST (dest_mode
, dest
,
9133 /* We should have bailed out earlier. */