/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains subroutines used only from the file reload1.c.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally record the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.

   NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better that way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

   1 happens every time find_reloads is called.
   2 happens only when REPLACE is 1, which is only when
   actually doing the reloads, not when just counting them.

   Using a reload register for several reloads in one insn:

   When an insn has reloads, it is considered as having three parts:
   the input reloads, the insn itself after reloading, and the output reloads.
   Reloads of values used in memory addresses are often needed for only one part.

   When this is so, reload_when_needed records which part needs the reload.
   Two reloads for different parts of the insn can share the same reload
   register.

   When a reload is used for addresses in multiple parts, or when it is
   an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
   a register with any other reload.  */
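
/* A minimal sketch of the calling sequence described above, as a caller in
   reload1.c might drive it.  This is illustrative pseudocode, not code from
   this file; the loop structure and the handling of the REPLACE flag are
   assumptions made only to show the order of the calls:

     init_reload ();
     ...
     for each insn in the current function
       {
         find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
         ... choose a hard register for each rld[r] whose reg_rtx is still
             zero, and emit the needed load insns before (and store insns
             after) the insn ...
         if (replace)
           subst_reloads (insn);
       }
*/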
/* We do not enable this with CHECKING_P, since it is awfully slow.  */
#undef DEBUG_RELOAD

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "tm_p.h"
#include "regs.h"
#include "recog.h"
#include "rtl-error.h"
#include "reload.h"
#include "addresses.h"
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))
/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static inline bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1
	  || (reg_class_size[(int) rclass] >= 1
	      && targetm.class_likely_spilled_p (rclass)));
}
/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
int n_reloads;
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
int n_earlyclobbers;
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;
/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */

/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in */
  int what;			/* which reload this is for */
  machine_mode mode;		/* mode it must have */
};

static struct replacement
  replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;
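
/* Illustrative sketch only, not the actual subst_reloads implementation
   (which lives later in this file and handles more cases): conceptually,
   once reload registers have been chosen, the table above is consumed
   roughly as follows, storing each chosen reload register, in the recorded
   mode, back through the recorded location:

     for (i = 0; i < n_replacements; i++)
       {
         struct replacement *r = &replacements[i];
         *r->where = gen_rtx_REG (r->mode, REGNO (rld[r->what].reg_rtx));
       }
*/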
/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};
#ifdef SECONDARY_MEMORY_NEEDED

/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;
#endif
/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx_insn *this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from that for the input operand.  */
static int output_reloadnum;
/* Compare two RTX's.  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)				\
			? REG_P (y) && REGNO (x) == REGNO (y)	\
			: rtx_equal_p (x, y) && ! side_effects_p (x))))

/* Indicates if two reload purposes are for similar enough things that we
   can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
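
/* Worked examples of the two macros above (illustrative, not exhaustive):

     MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS,
                       RELOAD_FOR_INPUT_ADDRESS, 0, 1)    is false
        (equal types, but the operand numbers differ and the type is not
         one of the always-mergeable ones);

     MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)  is true
     MERGE_TO_OTHER   (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)  is false
        (two input reloads merge and keep RELOAD_FOR_INPUT);

     MERGABLE_RELOADS (RELOAD_OTHER, RELOAD_FOR_OUTPUT, 1, 1)     is true
     MERGE_TO_OTHER   (RELOAD_OTHER, RELOAD_FOR_OUTPUT, 1, 1)     is true
        (anything merges with RELOAD_OTHER, and the merged reload is then
         classified RELOAD_OTHER).  */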
/* If we are going to reload an address, compute the reload type to
   use.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
static int push_secondary_reload (int, rtx, int, int, enum reg_class,
				  machine_mode, enum reload_type,
				  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (machine_mode, machine_mode,
					int, unsigned int);
static void push_replacement (rtx *, int, machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
				 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
			      machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
				rtx_insn *, int *);
static rtx make_memloc (rtx, int);
static int maybe_memory_address_addr_space_p (machine_mode, rtx,
					      addr_space_t, rtx *);
static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
				 int, enum reload_type, int, rtx_insn *);
static rtx subst_reg_equivs (rtx, rtx_insn *);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx_insn *, int, int);
static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
				   enum rtx_code, enum rtx_code, rtx *,
				   int, enum reload_type, int, rtx_insn *);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
				       machine_mode, int,
				       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
					int, rtx_insn *, int *);
static void copy_replacements_1 (rtx *, rtx *, int);
static int find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
					 rtx, rtx *);
/* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list (regno)
    = alloc_EXPR_LIST (REG_EQUIV, mem,
		       reg_equiv_alt_mem_list (regno));
}
/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  secondary_reload_info sri;

  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      scratch_class = (reg_class_for_constraint
		       (lookup_constraint (scratch_constraint)));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     cannot use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  if (s_reload == n_reloads)
    {
#ifdef SECONDARY_MEMORY_NEEDED
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}
#endif

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (! in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
	get_secondary_mem (x, mode, opnum, type);
#endif
    }

  *picode = icode;
  return s_reload;
}
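
/* Illustrative example for the scratch-register path above, using a
   hypothetical machine description (the pattern name, constraints and
   register-class letter are assumptions, not taken from any real target):
   if the target's secondary_reload hook returns NO_REGS but sets sri.icode
   to a pattern such as

     (define_expand "reload_insi"
       [(set (match_operand:SI 0 "register_operand" "=d")
             (match_operand:SI 1 "memory_operand" ""))
        (clobber (match_operand:SI 2 "register_operand" "=&a"))]
       "" "")

   then push_secondary_reload asserts the pattern has exactly three
   operands, takes operand 2's constraint "=&a", skips the '=' and '&',
   and pushes a reload whose class is the register class of "a" and whose
   mode is operand 2's mode.  */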
/* If a secondary reload is needed, return its class.  If both an intermediate
   register and a scratch register are needed, we return the class of the
   intermediate register.  */
reg_class_t
secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
			rtx x)
{
  enum insn_code icode;
  secondary_reload_info sri;

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = NULL;
  rclass
    = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If there are no secondary reloads at all, we return NO_REGS.
     If an intermediate register is needed, we return its class.  */
  if (icode == CODE_FOR_nothing || rclass != NO_REGS)
    return rclass;

  /* No intermediate register is needed, but we have a special reload
     pattern, which we assume for now needs a scratch register.  */
  return scratch_reload_class (icode);
}
/* ICODE is the insn_code of a reload pattern.  Check that it has exactly
   three operands, verify that operand 2 is an output operand, and return
   its register class.

   ??? We'd like to be able to handle any pattern with at least 2 operands,
   for zero or more scratch registers, but that needs more infrastructure.  */
reg_class_t
scratch_reload_class (enum insn_code icode)
{
  const char *scratch_constraint;
  enum reg_class rclass;

  gcc_assert (insn_data[(int) icode].n_operands == 3);
  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
  gcc_assert (*scratch_constraint == '=');
  scratch_constraint++;
  if (*scratch_constraint == '&')
    scratch_constraint++;
  rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
  gcc_assert (rclass != NO_REGS);
  return rclass;
}
#ifdef SECONDARY_MEMORY_NEEDED

/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short load and stores from all registers
     (e.g., FP registers).  */

#ifdef SECONDARY_MEMORY_NEEDED_MODE
  mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
#else
  if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
    mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
#endif

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int) mode)
    secondary_memlocs_elim_used = (int) mode + 1;
  return loc;
}

/* Clear any secondary memory locations we've made.  */

void
clear_secondary_mem (void)
{
  memset (secondary_memlocs, 0, sizeof secondary_memlocs);
}
#endif /* SECONDARY_MEMORY_NEEDED */
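
/* Usage note for get_secondary_mem, under the assumption of a 32-bit
   BITS_PER_WORD target that defines neither SECONDARY_MEMORY_NEEDED_MODE
   nor SECONDARY_MEMORY_NEEDED_RTX (an illustrative scenario, not a
   statement about any particular port):

     get_secondary_mem (x, HImode, 0, RELOAD_FOR_INPUT);
     get_secondary_mem (y, QImode, 0, RELOAD_FOR_INPUT);

   both requests are widened to SImode above, so they return the same
   word-sized stack slot, cached in secondary_memlocs_elim[(int) SImode][0],
   and narrow integral copies for operand 0 of the insn share it.  */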
/* Find the largest class which has at least one register valid in
   mode INNER, and which for every such register, that register number
   plus N is also valid in OUTER (if in range) and is cheap to move
   into REGNO.  Such a class must exist.  */

static enum reg_class
find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
		  machine_mode inner ATTRIBUTE_UNUSED, int n,
		  unsigned int dest_regno ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      int bad = 0;
      int good = 0;
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
	  {
	    if (HARD_REGNO_MODE_OK (regno, inner))
	      {
		good = 1;
		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
		    && ! HARD_REGNO_MODE_OK (regno + n, outer))
		  bad = 1;
	      }
	  }

      if (bad || !good)
	continue;
      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((reg_class_size[rclass] > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = reg_class_size[rclass];
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

  return best_class;
}
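
/* A small worked example of the selection rule above, for a hypothetical
   register file (the modes and N value are assumptions chosen only to
   illustrate the tests): with INNER == DImode, N == 1 and OUTER == SImode,
   a class is rejected as soon as it contains a register R for which
   HARD_REGNO_MODE_OK (R, DImode) holds but R + 1 is also in the class and
   ! HARD_REGNO_MODE_OK (R + 1, SImode); among the surviving classes the
   loop keeps the largest one whose move cost into DEST_REGNO's class is no
   worse than the best so far, and switches to any strictly cheaper one.  */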
/* We are trying to reload a subreg of something that is not a register.
   Find the largest class which contains only registers valid in
   mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
   which we would eventually like to obtain the object.  */

static enum reg_class
find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    enum reg_class dest_class ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  unsigned int regno;
  enum reg_class best_class = NO_REGS;
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      unsigned int computed_rclass_size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
	    && HARD_REGNO_MODE_OK (regno, mode))
	  computed_rclass_size++;

      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((computed_rclass_size > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = computed_rclass_size;
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

#ifdef LIMIT_RELOAD_CLASS
  best_class = LIMIT_RELOAD_CLASS (mode, best_class);
#endif
  return best_class;
}
/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;
  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}
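
/* Illustrative RTL example for the second loop above: an input reload of
   (reg:SI 3) can reuse an already-recorded input reload whose rld[i].in is
   (post_inc:SI (reg:SI 3)), because the value of the post-incremented
   register is the value wanted; in that case *P_IN is rewritten to the
   (post_inc ...) expression so that reload_in ends up holding the
   increment rather than the plain register.  The register number 3 and
   SImode here are arbitrary illustrative choices.  */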
/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
   expression.  MODE is the mode that X will be used in.  OUTPUT is true if
   the function is invoked for the output part of an enclosing reload.  */

static bool
reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
{
  rtx inner;

  /* Only SUBREGs are problematical.  */
  if (GET_CODE (x) != SUBREG)
    return false;

  inner = SUBREG_REG (x);

  /* If INNER is a constant or PLUS, then INNER will need reloading.  */
  if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
    return true;

  /* If INNER is not a hard register, then INNER will not need reloading.  */
  if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
    return false;

  /* If INNER is not ok for MODE, then INNER will need reloading.  */
  if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
    return true;

  /* If this is for an output, and the outer part is a word or smaller,
     INNER is larger than a word and the number of registers in INNER is
     not the same as the number of words in INNER, then INNER will need
     reloading (with an in-out reload).  */
  return (output
	  && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
	      != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
}
/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
   requiring an extra reload register.  The caller has already found that
   IN contains some reference to REGNO, so check that we can produce the
   new value in a single step.  E.g. if we have
   (set (reg r13) (plus (reg r13) (const int 1))), and there is an
   instruction that adds one to a register, this should succeed.
   However, if we have something like
   (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
   needs to be loaded into a register first, we need a separate reload
   register.
   Such PLUS reloads are generated by find_reload_address_part.
   The out-of-range PLUS expressions are usually introduced in the instruction
   patterns by register elimination and substituting pseudos without a home
   by their function-invariant equivalences.  */

static int
can_reload_into (rtx in, int regno, machine_mode mode)
{
  rtx dst;
  rtx_insn *test_insn;
  int r = 0;
  struct recog_data_d save_recog_data;

  /* For matching constraints, we often get notional input reloads where
     we want to use the original register as the reload register.  I.e.
     technically this is a non-optional input-output reload, but IN is
     already a valid register, and has been chosen as the reload register.
     Speed this up, since it trivially works.  */
  if (REG_P (in))
    return 1;

  /* To test MEMs properly, we'd have to take into account all the reloads
     that are already scheduled, which can become quite complicated.
     And since we've already handled address reloads for this MEM, it
     should always succeed anyway.  */
  if (MEM_P (in))
    return 1;

  /* If we can make a simple SET insn that does the job, everything should
     be fine.  */
  dst = gen_rtx_REG (mode, regno);
  test_insn = make_insn_raw (gen_rtx_SET (dst, in));
  save_recog_data = recog_data;
  if (recog_memoized (test_insn) >= 0)
    {
      extract_insn (test_insn);
      r = constrain_operands (1, get_enabled_alternatives (test_insn));
    }
  recog_data = save_recog_data;
  return r;
}
/* Record one reload that needs to be performed.
   IN is an rtx saying where the data are to be found before this instruction.
   OUT says where they must be stored after the instruction.
   (IN is zero for data not read, and OUT is zero for data not written.)
   INLOC and OUTLOC point to the places in the instructions where
   IN and OUT were found.
   If IN and OUT are both nonzero, it means the same register must be used
   to reload both IN and OUT.

   RCLASS is a register class required for the reloaded data.
   INMODE is the machine mode that the instruction requires
   for the reg that replaces IN and OUTMODE is likewise for OUT.

   If IN is zero, then OUT's location and mode should be passed as
   INLOC and INMODE.

   STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.

   OPTIONAL nonzero means this reload does not need to be performed:
   it can be discarded if that is more convenient.

   OPNUM and TYPE say what the purpose of this reload is.

   The return value is the reload-number for this reload.

   If both IN and OUT are nonzero, in some rare cases we might
   want to make two separate reloads.  (Actually we never do this now.)
   Therefore, the reload-number for OUT is stored in
   output_reloadnum when we return; the return value applies to IN.
   Usually (presently always), when IN and OUT are nonzero,
   the two reload-numbers are equal, but the caller should be careful to
   distinguish them.  */
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, machine_mode inmode,
	     machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  int dont_share = 0;
  int dont_remove_subreg = 0;
#ifdef LIMIT_RELOAD_CLASS
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
#endif
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;
  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
  subreg_in_class = NO_REGS;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);

  /* If find_reloads and friends have until now failed to replace a pseudo
     with its reg_equiv_constant, something went wrong beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }

  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */

  if (in != 0 && GET_CODE (in) == SUBREG
      && (subreg_lowpart_p (in) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
#endif
      && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
      && (CONSTANT_P (SUBREG_REG (in))
	  || GET_CODE (SUBREG_REG (in)) == PLUS
	  || strict_low
	  || (((REG_P (SUBREG_REG (in))
		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (in)))
	      && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			  <= UNITS_PER_WORD)
		      && paradoxical_subreg_p (inmode,
					       GET_MODE (SUBREG_REG (in)))
		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
		  || (WORD_REGISTER_OPERATIONS
		      && (GET_MODE_PRECISION (inmode)
			  < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
			   / UNITS_PER_WORD)))))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      /* The case where out is nonzero
		 is handled differently in the following statement.  */
	      && (out == 0 || subreg_lowpart_p (in))
	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
		       > UNITS_PER_WORD)
		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			/ UNITS_PER_WORD)
		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
						[GET_MODE (SUBREG_REG (in))]))
		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
	      && (secondary_reload_class (1, rclass,
					  GET_MODE (SUBREG_REG (in)),
					  SUBREG_REG (in))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P
	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
#endif
	  ))
    {
#ifdef LIMIT_RELOAD_CLASS
      in_subreg_loc = inloc;
#endif
      inloc = &SUBREG_REG (in);
      in = *inloc;

      if (!WORD_REGISTER_OPERATIONS
	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
	  && MEM_P (in))
	/* This is supposed to happen only for paradoxical subregs made by
	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));

      inmode = GET_MODE (in);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where M1 is not valid for R if it was not handled by the code above.

     Similar issue for (SUBREG constant ...) if it was not handled by the
     code above.  This can happen if SUBREG_BYTE != 0.

     However, we must reload the inner reg *as well as* the subreg in
     that case.  */

  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (CONSTANT_P (SUBREG_REG (in))
	       || GET_CODE (SUBREG_REG (in)) == PLUS)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }

  /* Similarly for paradoxical and problematical SUBREGs on the output.
     Note that there is no reason we need worry about the previous value
     of SUBREG_REG (out); even if wider than out, storing in a subreg is
     entitled to clobber it all (except in the case of a word mode subreg
     or of a STRICT_LOW_PART, in that latter case the constraint should
     label it input-output.)  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && (subreg_lowpart_p (out) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
#endif
      && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
      && (CONSTANT_P (SUBREG_REG (out))
	  || strict_low
	  || (((REG_P (SUBREG_REG (out))
		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (out)))
	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
		  || (WORD_REGISTER_OPERATIONS
		      && (GET_MODE_PRECISION (outmode)
			  < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
			   / UNITS_PER_WORD)))))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      /* The case of a word mode subreg
		 is handled differently in the following statement.  */
	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
			> UNITS_PER_WORD))
	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
	      && (secondary_reload_class (0, rclass,
					  GET_MODE (SUBREG_REG (out)),
					  SUBREG_REG (out))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   outmode))
#endif
	  ))
    {
#ifdef LIMIT_RELOAD_CLASS
      out_subreg_loc = outloc;
#endif
      outloc = &SUBREG_REG (out);
      out = *outloc;
      gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
		  || GET_MODE_SIZE (GET_MODE (out))
		     <= GET_MODE_SIZE (outmode));
      outmode = GET_MODE (out);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.

     However, we must reload the inner reg *as well as* the subreg in
     that case and the inner reg is an in-out reload.  */

  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }

  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
  if (in != 0 && out != 0 && MEM_P (out)
      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
    dont_share = 1;

  /* If IN is a SUBREG of a hard register, make a new REG.  This
     simplifies some of the cases below.  */

  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));

  /* Similarly for OUT.  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && REG_P (SUBREG_REG (out))
      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));

  /* Narrow down the class of register wanted if that is
     desirable on this machine for efficiency.  */
  {
    reg_class_t preferred_class = rclass;

    if (in != 0)
      preferred_class = targetm.preferred_reload_class (in, rclass);

    /* Output reloads may need analogous treatment, different in detail.  */
    if (out != 0)
      preferred_class
	= targetm.preferred_output_reload_class (out, preferred_class);

    /* Discard what the target said if we cannot do it.  */
    if (preferred_class != NO_REGS
	|| (optional && type == RELOAD_FOR_OUTPUT))
      rclass = (enum reg_class) preferred_class;
  }

  /* Make sure we use a class that can handle the actual pseudo
     inside any subreg.  For example, on the 386, QImode regs
     can appear within SImode subregs.  Although GENERAL_REGS
     can handle SImode, QImode needs a smaller class.  */
#ifdef LIMIT_RELOAD_CLASS
  if (in_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
  else if (in != 0 && GET_CODE (in) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);

  if (out_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
  if (out != 0 && GET_CODE (out) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif

  /* Verify that this class is at least possible for the mode that
     is specified.  */
  if (this_insn_is_asm)
    {
      machine_mode mode;
      if (paradoxical_subreg_p (inmode, outmode))
	mode = inmode;
      else
	mode = outmode;
      if (mode == VOIDmode)
	{
	  error_for_asm (this_insn, "cannot reload integer constant "
			 "operand in %<asm%>");
	  mode = word_mode;
	  if (in != 0)
	    inmode = word_mode;
	  if (out != 0)
	    outmode = word_mode;
	}
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (HARD_REGNO_MODE_OK (i, mode)
	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
	  break;
      if (i == FIRST_PSEUDO_REGISTER)
	{
	  error_for_asm (this_insn, "impossible register constraint "
			 "in %<asm%>");
	  /* Avoid further trouble with this insn.  */
	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
	  /* We used to continue here setting class to ALL_REGS, but it triggers
	     sanity check on i386 for:
	     void foo(long double d)
	     {
	       asm("" :: "a" (d));
	     }
	     Returning zero here ought to be safe as we take care in
	     find_reloads to not process the reloads when instruction was
	     replaced by USE.  */
	  return 0;
	}
    }

  /* Optional output reloads are always OK even if we have no register class,
     since the function of these reloads is only to have spill_reg_store etc.
     set, so that the storing insn can be deleted later.  */
  gcc_assert (rclass != NO_REGS
	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));

  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);

  if (i == n_reloads)
    {
      /* See if we need a secondary reload register to move between CLASS
	 and IN or CLASS and OUT.  Get the icode and push any required reloads
	 needed for each of them if so.  */

      if (in != 0)
	secondary_in_reload
	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
				   &secondary_in_icode, NULL);
      if (out != 0 && GET_CODE (out) != SCRATCH)
	secondary_out_reload
	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
				   type, &secondary_out_icode, NULL);

      /* We found no existing reload suitable for re-use.
	 So add an additional reload.  */

#ifdef SECONDARY_MEMORY_NEEDED
      if (subreg_in_class == NO_REGS
	  && in != 0
	  && (REG_P (in)
	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
      /* If a memory location is needed for the copy, make one.  */
      if (subreg_in_class != NO_REGS
	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
	get_secondary_mem (in, inmode, opnum, type);
#endif

      i = n_reloads;
      rld[i].in = in;
      rld[i].out = out;
      rld[i].rclass = rclass;
      rld[i].inmode = inmode;
      rld[i].outmode = outmode;
      rld[i].reg_rtx = 0;
      rld[i].optional = optional;
      rld[i].inc = 0;
      rld[i].nocombine = 0;
      rld[i].in_reg = inloc ? *inloc : 0;
      rld[i].out_reg = outloc ? *outloc : 0;
      rld[i].opnum = opnum;
      rld[i].when_needed = type;
      rld[i].secondary_in_reload = secondary_in_reload;
      rld[i].secondary_out_reload = secondary_out_reload;
      rld[i].secondary_in_icode = secondary_in_icode;
      rld[i].secondary_out_icode = secondary_out_icode;
      rld[i].secondary_p = 0;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (out != 0
	  && (REG_P (out)
	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	  && SECONDARY_MEMORY_NEEDED (rclass,
				      REGNO_REG_CLASS (reg_or_subregno (out)),
				      outmode))
	get_secondary_mem (out, outmode, opnum, type);
#endif
    }
  else
    {
      /* We are reusing an existing reload,
	 but we may have additional information for it.
	 For example, we may now have both IN and OUT
	 while the old one may have just one of them.  */

      /* The modes can be different.  If they are, we want to reload in
	 the larger mode, so that the value is valid for both modes.  */
      if (inmode != VOIDmode
	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
	rld[i].inmode = inmode;
      if (outmode != VOIDmode
	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
	rld[i].outmode = outmode;
      if (in != 0)
	{
	  rtx in_reg = inloc ? *inloc : 0;
	  /* If we merge reloads for two distinct rtl expressions that
	     are identical in content, there might be duplicate address
	     reloads.  Remove the extra set now, so that if we later find
	     that we can inherit this reload, we can get rid of the
	     address reloads altogether.

	     Do not do this if both reloads are optional since the result
	     would be an optional reload which could potentially leave
	     unresolved address replacements.

	     It is not sufficient to call transfer_replacements since
	     choose_reload_regs will remove the replacements for address
	     reloads of inherited reloads which results in the same
	     problem.  */
	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
	      && ! (rld[i].optional && optional))
	    {
	      /* We must keep the address reload with the lower operand
		 number alive.  */
	      if (opnum > rld[i].opnum)
		{
		  remove_address_replacements (in);
		  in = rld[i].in;
		  in_reg = rld[i].in_reg;
		}
	      else
		remove_address_replacements (rld[i].in);
	    }
	  /* When emitting reloads we don't necessarily look at the in-
	     and outmode, but also directly at the operands (in and out).
	     So we can't simply overwrite them with whatever we have found
	     for this (to-be-merged) reload, we have to "merge" that too.
	     Reusing another reload already verified that we deal with the
	     same operands, just possibly in different modes.  So we
	     overwrite the operands only when the new mode is larger.
	     See also PR33613.  */
	  if (!rld[i].in
	      || GET_MODE_SIZE (GET_MODE (in))
		   > GET_MODE_SIZE (GET_MODE (rld[i].in)))
	    rld[i].in = in;
	  if (!rld[i].in_reg
	      || (in_reg
		  && GET_MODE_SIZE (GET_MODE (in_reg))
		       > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
	    rld[i].in_reg = in_reg;
	}
      if (out != 0)
	{
	  if (!rld[i].out
	      || (out
		  && GET_MODE_SIZE (GET_MODE (out))
		       > GET_MODE_SIZE (GET_MODE (rld[i].out))))
	    rld[i].out = out;
	  if (outloc
	      && (!rld[i].out_reg
		  || GET_MODE_SIZE (GET_MODE (*outloc))
		       > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
	    rld[i].out_reg = *outloc;
	}
      if (reg_class_subset_p (rclass, rld[i].rclass))
	rld[i].rclass = rclass;
      rld[i].optional &= optional;
      if (MERGE_TO_OTHER (type, rld[i].when_needed,
			  opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }

  /* If the ostensible rtx being reloaded differs from the rtx found
     in the location to substitute, this reload is not safe to combine
     because we cannot reliably tell whether it appears in the insn.  */

  if (in != 0 && in != *inloc)
    rld[i].nocombine = 1;

#if 0
  /* This was replaced by changes in find_reloads_address_1 and the new
     function inc_for_reload, which go with a new meaning of reload_inc.  */

  /* If this is an IN/OUT reload in an insn that sets the CC,
     it must be for an autoincrement.  It doesn't work to store
     the incremented value after the insn because that would clobber the CC.
     So we must do the increment of the value reloaded from,
     increment it, store it back, then decrement again.  */
  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
    {
      out = 0;
      rld[i].out = 0;
      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
      /* If we did not find a nonzero amount-to-increment-by,
	 that contradicts the belief that IN is being incremented
	 in an address in this insn.  */
      gcc_assert (rld[i].inc != 0);
    }
#endif

  /* If we will replace IN and OUT with the reload-reg,
     record where they are located so that substitution need
     not do a tree walk.  */

  if (replace_reloads)
    {
      if (inloc != 0)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = inloc;
	  r->mode = inmode;
	}
      if (outloc != 0 && outloc != inloc)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = output_reloadnum;
	  r->where = outloc;
	  r->mode = outmode;
	}
    }

  /* If this reload is just being introduced and it has both
     an incoming quantity and an outgoing quantity that are
     supposed to be made to match, see if either one of the two
     can serve as the place to reload into.

     If one of them is acceptable, set rld[i].reg_rtx
     to that one.  */

  if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
    {
      rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
					  inmode, outmode,
					  rld[i].rclass, i,
					  earlyclobber_operand_p (out));

      /* If the outgoing register already contains the same value
	 as the incoming one, we can dispense with loading it.
	 The easiest way to tell the caller that is to give a phony
	 value for the incoming operand (same as outgoing one).  */
      if (rld[i].reg_rtx == out
	  && (REG_P (in) || CONSTANT_P (in))
	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
				  static_reload_reg_p, i, inmode))
	rld[i].in = out;
    }

  /* If this is an input reload and the operand contains a register that
     dies in this insn and is used nowhere else, see if it is the right class
     to be used for this reload.  Use it if so.  (This occurs most commonly
     in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
     this if it is also an output reload that mentions the register unless
     the output is a SUBREG that clobbers an entire register.

     Note that the operand might be one of the spill regs, if it is a
     pseudo reg and we are in a block where spilling has not taken place.
     But if there is no spilling in this block, that is OK.
     An explicitly used hard reg cannot be a spill reg.  */

  if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
    {
      rtx note;
      int regno;
      machine_mode rel_mode = inmode;

      if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
	rel_mode = outmode;

      for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD
	    && REG_P (XEXP (note, 0))
	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	    && reg_mentioned_p (XEXP (note, 0), in)
	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    ORIGINAL_REGNO (XEXP (note, 0)))
		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
	    && ! refers_to_regno_for_reload_p (regno,
					       end_hard_regno (rel_mode,
							       regno),
					       PATTERN (this_insn), inloc)
	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
	    /* If this is also an output reload, IN cannot be used as
	       the reload register if it is set in this insn unless IN
	       is also OUT.  */
	    && (out == 0 || in == out
		|| ! hard_reg_set_here_p (regno,
					  end_hard_regno (rel_mode, regno),
					  PATTERN (this_insn)))
	    /* ??? Why is this code so different from the previous?
	       Is there any simple coherent way to describe the two together?
	       What's going on here.  */
	    && (in == out
		|| (GET_CODE (in) == SUBREG
		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
			 / UNITS_PER_WORD)
			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	    /* Make sure the operand fits in the reg that dies.  */
	    && (GET_MODE_SIZE (rel_mode)
		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
	    && HARD_REGNO_MODE_OK (regno, inmode)
	    && HARD_REGNO_MODE_OK (regno, outmode))
	  {
	    unsigned int offs;
	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
				      hard_regno_nregs[regno][outmode]);

	    for (offs = 0; offs < nregs; offs++)
	      if (fixed_regs[regno + offs]
		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
					  regno + offs))
		break;

	    if (offs == nregs
		&& (! (refers_to_regno_for_reload_p
		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
		    || can_reload_into (in, regno, inmode)))
	      {
		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
		break;
	      }
	  }
    }

  if (out)
    output_reloadnum = i;

  return i;
}
1657 /* Record an additional place we must replace a value
1658 for which we have already recorded a reload.
1659 RELOADNUM is the value returned by push_reload
1660 when the reload was recorded.
1661 This is used in insn patterns that use match_dup. */
1664 push_replacement (rtx
*loc
, int reloadnum
, machine_mode mode
)
1666 if (replace_reloads
)
1668 struct replacement
*r
= &replacements
[n_replacements
++];
1669 r
->what
= reloadnum
;
1675 /* Duplicate any replacement we have recorded to apply at
1676 location ORIG_LOC to also be performed at DUP_LOC.
1677 This is used in insn patterns that use match_dup. */
1680 dup_replacements (rtx
*dup_loc
, rtx
*orig_loc
)
1682 int i
, n
= n_replacements
;
1684 for (i
= 0; i
< n
; i
++)
1686 struct replacement
*r
= &replacements
[i
];
1687 if (r
->where
== orig_loc
)
1688 push_replacement (dup_loc
, r
->what
, r
->mode
);
1692 /* Transfer all replacements that used to be in reload FROM to be in
1696 transfer_replacements (int to
, int from
)
1700 for (i
= 0; i
< n_replacements
; i
++)
1701 if (replacements
[i
].what
== from
)
1702 replacements
[i
].what
= to
;
1705 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1706 or a subpart of it. If we have any replacements registered for IN_RTX,
1707 cancel the reloads that were supposed to load them.
1708 Return nonzero if we canceled any reloads. */
1710 remove_address_replacements (rtx in_rtx
)
1713 char reload_flags
[MAX_RELOADS
];
1714 int something_changed
= 0;
1716 memset (reload_flags
, 0, sizeof reload_flags
);
1717 for (i
= 0, j
= 0; i
< n_replacements
; i
++)
1719 if (loc_mentioned_in_p (replacements
[i
].where
, in_rtx
))
1720 reload_flags
[replacements
[i
].what
] |= 1;
1723 replacements
[j
++] = replacements
[i
];
1724 reload_flags
[replacements
[i
].what
] |= 2;
1727 /* Note that the following store must be done before the recursive calls. */
1730 for (i
= n_reloads
- 1; i
>= 0; i
--)
1732 if (reload_flags
[i
] == 1)
1734 deallocate_reload_reg (i
);
1735 remove_address_replacements (rld
[i
].in
);
1737 something_changed
= 1;
1740 return something_changed
;
1743 /* If there is only one output reload, and it is not for an earlyclobber
1744 operand, try to combine it with a (logically unrelated) input reload
1745 to reduce the number of reload registers needed.
1747 This is safe if the input reload does not appear in
1748 the value being output-reloaded, because this implies
1749 it is not needed any more once the original insn completes.
1751 If that doesn't work, see we can use any of the registers that
1752 die in this insn as a reload register. We can if it is of the right
1753 class and does not appear in the value being output-reloaded. */
1756 combine_reloads (void)
1759 int output_reload
= -1;
1760 int secondary_out
= -1;
1763 /* Find the output reload; return unless there is exactly one
1764 and that one is mandatory. */
1766 for (i
= 0; i
< n_reloads
; i
++)
1767 if (rld
[i
].out
!= 0)
1769 if (output_reload
>= 0)
1774 if (output_reload
< 0 || rld
[output_reload
].optional
)
1777 /* An input-output reload isn't combinable. */
1779 if (rld
[output_reload
].in
!= 0)
1782 /* If this reload is for an earlyclobber operand, we can't do anything. */
1783 if (earlyclobber_operand_p (rld
[output_reload
].out
))
1786 /* If there is a reload for part of the address of this operand, we would
1787 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1788 its life to the point where doing this combine would not lower the
1789 number of spill registers needed. */
1790 for (i
= 0; i
< n_reloads
; i
++)
1791 if ((rld
[i
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
1792 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
1793 && rld
[i
].opnum
== rld
[output_reload
].opnum
)
1796 /* Check each input reload; can we combine it? */
1798 for (i
= 0; i
< n_reloads
; i
++)
1799 if (rld
[i
].in
&& ! rld
[i
].optional
&& ! rld
[i
].nocombine
1800 /* Life span of this reload must not extend past main insn. */
1801 && rld
[i
].when_needed
!= RELOAD_FOR_OUTPUT_ADDRESS
1802 && rld
[i
].when_needed
!= RELOAD_FOR_OUTADDR_ADDRESS
1803 && rld
[i
].when_needed
!= RELOAD_OTHER
1804 && (ira_reg_class_max_nregs
[(int)rld
[i
].rclass
][(int) rld
[i
].inmode
]
1805 == ira_reg_class_max_nregs
[(int) rld
[output_reload
].rclass
]
1806 [(int) rld
[output_reload
].outmode
])
1808 && rld
[i
].reg_rtx
== 0
1809 #ifdef SECONDARY_MEMORY_NEEDED
1810 /* Don't combine two reloads with different secondary
1811 memory locations. */
1812 && (secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[i
].opnum
] == 0
1813 || secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
] == 0
1814 || rtx_equal_p (secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[i
].opnum
],
1815 secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
]))
1817 && (targetm
.small_register_classes_for_mode_p (VOIDmode
)
1818 ? (rld
[i
].rclass
== rld
[output_reload
].rclass
)
1819 : (reg_class_subset_p (rld
[i
].rclass
,
1820 rld
[output_reload
].rclass
)
1821 || reg_class_subset_p (rld
[output_reload
].rclass
,
1823 && (MATCHES (rld
[i
].in
, rld
[output_reload
].out
)
1824 /* Args reversed because the first arg seems to be
1825 the one that we imagine being modified
1826 while the second is the one that might be affected. */
1827 || (! reg_overlap_mentioned_for_reload_p (rld
[output_reload
].out
,
1829 /* However, if the input is a register that appears inside
1830 the output, then we also can't share.
1831 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1832 If the same reload reg is used for both reg 69 and the
1833 result to be stored in memory, then that result
1834 will clobber the address of the memory ref. */
1835 && ! (REG_P (rld
[i
].in
)
1836 && reg_overlap_mentioned_for_reload_p (rld
[i
].in
,
1837 rld
[output_reload
].out
))))
1838 && ! reload_inner_reg_of_subreg (rld
[i
].in
, rld
[i
].inmode
,
1839 rld
[i
].when_needed
!= RELOAD_FOR_INPUT
)
1840 && (reg_class_size
[(int) rld
[i
].rclass
]
1841 || targetm
.small_register_classes_for_mode_p (VOIDmode
))
1842 /* We will allow making things slightly worse by combining an
1843 input and an output, but no worse than that. */
1844 && (rld
[i
].when_needed
== RELOAD_FOR_INPUT
1845 || rld
[i
].when_needed
== RELOAD_FOR_OUTPUT
))
1849 /* We have found a reload to combine with! */
1850 rld
[i
].out
= rld
[output_reload
].out
;
1851 rld
[i
].out_reg
= rld
[output_reload
].out_reg
;
1852 rld
[i
].outmode
= rld
[output_reload
].outmode
;
1853 /* Mark the old output reload as inoperative. */
1854 rld
[output_reload
].out
= 0;
1855 /* The combined reload is needed for the entire insn. */
1856 rld
[i
].when_needed
= RELOAD_OTHER
;
1857 /* If the output reload had a secondary reload, copy it. */
1858 if (rld
[output_reload
].secondary_out_reload
!= -1)
1860 rld
[i
].secondary_out_reload
1861 = rld
[output_reload
].secondary_out_reload
;
1862 rld
[i
].secondary_out_icode
1863 = rld
[output_reload
].secondary_out_icode
;
1866 #ifdef SECONDARY_MEMORY_NEEDED
1867 /* Copy any secondary MEM. */
1868 if (secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
] != 0)
1869 secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[i
].opnum
]
1870 = secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
];
1872 /* If required, minimize the register class. */
1873 if (reg_class_subset_p (rld
[output_reload
].rclass
,
1875 rld
[i
].rclass
= rld
[output_reload
].rclass
;
1877 /* Transfer all replacements from the old reload to the combined. */
1878 for (j
= 0; j
< n_replacements
; j
++)
1879 if (replacements
[j
].what
== output_reload
)
1880 replacements
[j
].what
= i
;
1885 /* If this insn has only one operand that is modified or written (assumed
1886 to be the first), it must be the one corresponding to this reload. It
1887 is safe to use anything that dies in this insn for that output provided
1888 that it does not occur in the output (we already know it isn't an
1889 earlyclobber. If this is an asm insn, give up. */
1891 if (INSN_CODE (this_insn
) == -1)
1894 for (i
= 1; i
< insn_data
[INSN_CODE (this_insn
)].n_operands
; i
++)
1895 if (insn_data
[INSN_CODE (this_insn
)].operand
[i
].constraint
[0] == '='
1896 || insn_data
[INSN_CODE (this_insn
)].operand
[i
].constraint
[0] == '+')
1899 /* See if some hard register that dies in this insn and is not used in
1900 the output is the right class. Only works if the register we pick
1901 up can fully hold our output reload. */
1902 for (note
= REG_NOTES (this_insn
); note
; note
= XEXP (note
, 1))
1903 if (REG_NOTE_KIND (note
) == REG_DEAD
1904 && REG_P (XEXP (note
, 0))
1905 && !reg_overlap_mentioned_for_reload_p (XEXP (note
, 0),
1906 rld
[output_reload
].out
)
1907 && (regno
= REGNO (XEXP (note
, 0))) < FIRST_PSEUDO_REGISTER
1908 && HARD_REGNO_MODE_OK (regno
, rld
[output_reload
].outmode
)
1909 && TEST_HARD_REG_BIT (reg_class_contents
[(int) rld
[output_reload
].rclass
],
1911 && (hard_regno_nregs
[regno
][rld
[output_reload
].outmode
]
1912 <= hard_regno_nregs
[regno
][GET_MODE (XEXP (note
, 0))])
1913 /* Ensure that a secondary or tertiary reload for this output
1914 won't want this register. */
1915 && ((secondary_out
= rld
[output_reload
].secondary_out_reload
) == -1
1916 || (!(TEST_HARD_REG_BIT
1917 (reg_class_contents
[(int) rld
[secondary_out
].rclass
], regno
))
1918 && ((secondary_out
= rld
[secondary_out
].secondary_out_reload
) == -1
1919 || !(TEST_HARD_REG_BIT
1920 (reg_class_contents
[(int) rld
[secondary_out
].rclass
],
1922 && !fixed_regs
[regno
]
1923 /* Check that a former pseudo is valid; see find_dummy_reload. */
1924 && (ORIGINAL_REGNO (XEXP (note
, 0)) < FIRST_PSEUDO_REGISTER
1925 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
1926 ORIGINAL_REGNO (XEXP (note
, 0)))
1927 && hard_regno_nregs
[regno
][GET_MODE (XEXP (note
, 0))] == 1)))
1929 rld
[output_reload
].reg_rtx
1930 = gen_rtx_REG (rld
[output_reload
].outmode
, regno
);
1935 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1936 See if one of IN and OUT is a register that may be used;
1937 this is desirable since a spill-register won't be needed.
1938 If so, return the register rtx that proves acceptable.
1940 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1941 RCLASS is the register class required for the reload.
1943 If FOR_REAL is >= 0, it is the number of the reload,
1944 and in some cases when it can be discovered that OUT doesn't need
1945 to be computed, clear out rld[FOR_REAL].out.
1947 If FOR_REAL is -1, this should not be done, because this call
1948 is just to see if a register can be found, not to find and install it.
1950 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1951 puts an additional constraint on being able to use IN for OUT since
1952 IN must not appear elsewhere in the insn (it is assumed that IN itself
1953 is safe from the earlyclobber). */
1956 find_dummy_reload (rtx real_in
, rtx real_out
, rtx
*inloc
, rtx
*outloc
,
1957 machine_mode inmode
, machine_mode outmode
,
1958 reg_class_t rclass
, int for_real
, int earlyclobber
)
1966 /* If operands exceed a word, we can't use either of them
1967 unless they have the same size. */
1968 if (GET_MODE_SIZE (outmode
) != GET_MODE_SIZE (inmode
)
1969 && (GET_MODE_SIZE (outmode
) > UNITS_PER_WORD
1970 || GET_MODE_SIZE (inmode
) > UNITS_PER_WORD
))
1973 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1974 respectively refers to a hard register. */
1976 /* Find the inside of any subregs. */
1977 while (GET_CODE (out
) == SUBREG
)
1979 if (REG_P (SUBREG_REG (out
))
1980 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
)
1981 out_offset
+= subreg_regno_offset (REGNO (SUBREG_REG (out
)),
1982 GET_MODE (SUBREG_REG (out
)),
1985 out
= SUBREG_REG (out
);
1987 while (GET_CODE (in
) == SUBREG
)
1989 if (REG_P (SUBREG_REG (in
))
1990 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
)
1991 in_offset
+= subreg_regno_offset (REGNO (SUBREG_REG (in
)),
1992 GET_MODE (SUBREG_REG (in
)),
1995 in
= SUBREG_REG (in
);
1998 /* Narrow down the reg class, the same way push_reload will;
1999 otherwise we might find a dummy now, but push_reload won't. */
2001 reg_class_t preferred_class
= targetm
.preferred_reload_class (in
, rclass
);
2002 if (preferred_class
!= NO_REGS
)
2003 rclass
= (enum reg_class
) preferred_class
;
2006 /* See if OUT will do. */
2008 && REGNO (out
) < FIRST_PSEUDO_REGISTER
)
2010 unsigned int regno
= REGNO (out
) + out_offset
;
2011 unsigned int nwords
= hard_regno_nregs
[regno
][outmode
];
2014 /* When we consider whether the insn uses OUT,
2015 ignore references within IN. They don't prevent us
2016 from copying IN into OUT, because those refs would
2017 move into the insn that reloads IN.
2019 However, we only ignore IN in its role as this reload.
2020 If the insn uses IN elsewhere and it contains OUT,
2021 that counts. We can't be sure it's the "same" operand
2022 so it might not go through this reload.
2024 We also need to avoid using OUT if it, or part of it, is a
2025 fixed register. Modifying such registers, even transiently,
2026 may have undefined effects on the machine, such as modifying
2027 the stack pointer. */
2029 *inloc
= const0_rtx
;
2031 if (regno
< FIRST_PSEUDO_REGISTER
2032 && HARD_REGNO_MODE_OK (regno
, outmode
)
2033 && ! refers_to_regno_for_reload_p (regno
, regno
+ nwords
,
2034 PATTERN (this_insn
), outloc
))
2038 for (i
= 0; i
< nwords
; i
++)
2039 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
2041 || fixed_regs
[regno
+ i
])
2046 if (REG_P (real_out
))
2049 value
= gen_rtx_REG (outmode
, regno
);
2056 /* Consider using IN if OUT was not acceptable
2057 or if OUT dies in this insn (like the quotient in a divmod insn).
2058 We can't use IN unless it is dies in this insn,
2059 which means we must know accurately which hard regs are live.
2060 Also, the result can't go in IN if IN is used within OUT,
2061 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2062 if (hard_regs_live_known
2064 && REGNO (in
) < FIRST_PSEUDO_REGISTER
2066 || find_reg_note (this_insn
, REG_UNUSED
, real_out
))
2067 && find_reg_note (this_insn
, REG_DEAD
, real_in
)
2068 && !fixed_regs
[REGNO (in
)]
2069 && HARD_REGNO_MODE_OK (REGNO (in
),
2070 /* The only case where out and real_out might
2071 have different modes is where real_out
2072 is a subreg, and in that case, out
2074 (GET_MODE (out
) != VOIDmode
2075 ? GET_MODE (out
) : outmode
))
2076 && (ORIGINAL_REGNO (in
) < FIRST_PSEUDO_REGISTER
2077 /* However only do this if we can be sure that this input
2078 operand doesn't correspond with an uninitialized pseudo.
2079 global can assign some hardreg to it that is the same as
2080 the one assigned to a different, also live pseudo (as it
2081 can ignore the conflict). We must never introduce writes
2082 to such hardregs, as they would clobber the other live
2083 pseudo. See PR 20973. */
2084 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
2085 ORIGINAL_REGNO (in
))
2086 /* Similarly, only do this if we can be sure that the death
2087 note is still valid. global can assign some hardreg to
2088 the pseudo referenced in the note and simultaneously a
2089 subword of this hardreg to a different, also live pseudo,
2090 because only another subword of the hardreg is actually
2091 used in the insn. This cannot happen if the pseudo has
2092 been assigned exactly one hardreg. See PR 33732. */
2093 && hard_regno_nregs
[REGNO (in
)][GET_MODE (in
)] == 1)))
2095 unsigned int regno
= REGNO (in
) + in_offset
;
2096 unsigned int nwords
= hard_regno_nregs
[regno
][inmode
];
2098 if (! refers_to_regno_for_reload_p (regno
, regno
+ nwords
, out
, (rtx
*) 0)
2099 && ! hard_reg_set_here_p (regno
, regno
+ nwords
,
2100 PATTERN (this_insn
))
2102 || ! refers_to_regno_for_reload_p (regno
, regno
+ nwords
,
2103 PATTERN (this_insn
), inloc
)))
2107 for (i
= 0; i
< nwords
; i
++)
2108 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
2114 /* If we were going to use OUT as the reload reg
2115 and changed our mind, it means OUT is a dummy that
2116 dies here. So don't bother copying value to it. */
2117 if (for_real
>= 0 && value
== real_out
)
2118 rld
[for_real
].out
= 0;
2119 if (REG_P (real_in
))
2122 value
= gen_rtx_REG (inmode
, regno
);
2130 /* This page contains subroutines used mainly for determining
2131 whether the IN or an OUT of a reload can serve as the
2134 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2137 earlyclobber_operand_p (rtx x
)
2141 for (i
= 0; i
< n_earlyclobbers
; i
++)
2142 if (reload_earlyclobbers
[i
] == x
)
2148 /* Return 1 if expression X alters a hard reg in the range
2149 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2150 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2151 X should be the body of an instruction. */
2154 hard_reg_set_here_p (unsigned int beg_regno
, unsigned int end_regno
, rtx x
)
2156 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
2158 rtx op0
= SET_DEST (x
);
2160 while (GET_CODE (op0
) == SUBREG
)
2161 op0
= SUBREG_REG (op0
);
2164 unsigned int r
= REGNO (op0
);
2166 /* See if this reg overlaps range under consideration. */
2168 && end_hard_regno (GET_MODE (op0
), r
) > beg_regno
)
2172 else if (GET_CODE (x
) == PARALLEL
)
2174 int i
= XVECLEN (x
, 0) - 1;
2177 if (hard_reg_set_here_p (beg_regno
, end_regno
, XVECEXP (x
, 0, i
)))
2184 /* Return 1 if ADDR is a valid memory address for mode MODE
2185 in address space AS, and check that each pseudo reg has the
2186 proper kind of hard reg. */
2189 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED
,
2190 rtx addr
, addr_space_t as
)
2192 #ifdef GO_IF_LEGITIMATE_ADDRESS
2193 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
2194 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
2200 return targetm
.addr_space
.legitimate_address_p (mode
, addr
, 1, as
);
2204 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2205 if they are the same hard reg, and has special hacks for
2206 autoincrement and autodecrement.
2207 This is specifically intended for find_reloads to use
2208 in determining whether two operands match.
2209 X is the operand whose number is the lower of the two.
2211 The value is 2 if Y contains a pre-increment that matches
2212 a non-incrementing address in X. */
2214 /* ??? To be completely correct, we should arrange to pass
2215 for X the output operand and for Y the input operand.
2216 For now, we assume that the output operand has the lower number
2217 because that is natural in (SET output (... input ...)). */
2220 operands_match_p (rtx x
, rtx y
)
2223 RTX_CODE code
= GET_CODE (x
);
2229 if ((code
== REG
|| (code
== SUBREG
&& REG_P (SUBREG_REG (x
))))
2230 && (REG_P (y
) || (GET_CODE (y
) == SUBREG
2231 && REG_P (SUBREG_REG (y
)))))
2237 i
= REGNO (SUBREG_REG (x
));
2238 if (i
>= FIRST_PSEUDO_REGISTER
)
2240 i
+= subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2241 GET_MODE (SUBREG_REG (x
)),
2248 if (GET_CODE (y
) == SUBREG
)
2250 j
= REGNO (SUBREG_REG (y
));
2251 if (j
>= FIRST_PSEUDO_REGISTER
)
2253 j
+= subreg_regno_offset (REGNO (SUBREG_REG (y
)),
2254 GET_MODE (SUBREG_REG (y
)),
2261 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2262 multiple hard register group of scalar integer registers, so that
2263 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2265 if (REG_WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
2266 && SCALAR_INT_MODE_P (GET_MODE (x
))
2267 && i
< FIRST_PSEUDO_REGISTER
)
2268 i
+= hard_regno_nregs
[i
][GET_MODE (x
)] - 1;
2269 if (REG_WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (y
)) > UNITS_PER_WORD
2270 && SCALAR_INT_MODE_P (GET_MODE (y
))
2271 && j
< FIRST_PSEUDO_REGISTER
)
2272 j
+= hard_regno_nregs
[j
][GET_MODE (y
)] - 1;
2276 /* If two operands must match, because they are really a single
2277 operand of an assembler insn, then two postincrements are invalid
2278 because the assembler insn would increment only once.
2279 On the other hand, a postincrement matches ordinary indexing
2280 if the postincrement is the output operand. */
2281 if (code
== POST_DEC
|| code
== POST_INC
|| code
== POST_MODIFY
)
2282 return operands_match_p (XEXP (x
, 0), y
);
2283 /* Two preincrements are invalid
2284 because the assembler insn would increment only once.
2285 On the other hand, a preincrement matches ordinary indexing
2286 if the preincrement is the input operand.
2287 In this case, return 2, since some callers need to do special
2288 things when this happens. */
2289 if (GET_CODE (y
) == PRE_DEC
|| GET_CODE (y
) == PRE_INC
2290 || GET_CODE (y
) == PRE_MODIFY
)
2291 return operands_match_p (x
, XEXP (y
, 0)) ? 2 : 0;
2295 /* Now we have disposed of all the cases in which different rtx codes
2297 if (code
!= GET_CODE (y
))
2300 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2301 if (GET_MODE (x
) != GET_MODE (y
))
2304 /* MEMs referring to different address space are not equivalent. */
2305 if (code
== MEM
&& MEM_ADDR_SPACE (x
) != MEM_ADDR_SPACE (y
))
2314 return label_ref_label (x
) == label_ref_label (y
);
2316 return XSTR (x
, 0) == XSTR (y
, 0);
2322 /* Compare the elements. If any pair of corresponding elements
2323 fail to match, return 0 for the whole things. */
2326 fmt
= GET_RTX_FORMAT (code
);
2327 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2333 if (XWINT (x
, i
) != XWINT (y
, i
))
2338 if (XINT (x
, i
) != XINT (y
, i
))
2343 val
= operands_match_p (XEXP (x
, i
), XEXP (y
, i
));
2346 /* If any subexpression returns 2,
2347 we should return 2 if we are successful. */
2356 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2358 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; --j
)
2360 val
= operands_match_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
));
2368 /* It is believed that rtx's at this level will never
2369 contain anything but integers and other rtx's,
2370 except for within LABEL_REFs and SYMBOL_REFs. */
2375 return 1 + success_2
;
2378 /* Describe the range of registers or memory referenced by X.
2379 If X is a register, set REG_FLAG and put the first register
2380 number into START and the last plus one into END.
2381 If X is a memory reference, put a base address into BASE
2382 and a range of integer offsets into START and END.
2383 If X is pushing on the stack, we can assume it causes no trouble,
2384 so we set the SAFE field. */
2386 static struct decomposition
2389 struct decomposition val
;
2392 memset (&val
, 0, sizeof (val
));
2394 switch (GET_CODE (x
))
2398 rtx base
= NULL_RTX
, offset
= 0;
2399 rtx addr
= XEXP (x
, 0);
2401 if (GET_CODE (addr
) == PRE_DEC
|| GET_CODE (addr
) == PRE_INC
2402 || GET_CODE (addr
) == POST_DEC
|| GET_CODE (addr
) == POST_INC
)
2404 val
.base
= XEXP (addr
, 0);
2405 val
.start
= -GET_MODE_SIZE (GET_MODE (x
));
2406 val
.end
= GET_MODE_SIZE (GET_MODE (x
));
2407 val
.safe
= REGNO (val
.base
) == STACK_POINTER_REGNUM
;
2411 if (GET_CODE (addr
) == PRE_MODIFY
|| GET_CODE (addr
) == POST_MODIFY
)
2413 if (GET_CODE (XEXP (addr
, 1)) == PLUS
2414 && XEXP (addr
, 0) == XEXP (XEXP (addr
, 1), 0)
2415 && CONSTANT_P (XEXP (XEXP (addr
, 1), 1)))
2417 val
.base
= XEXP (addr
, 0);
2418 val
.start
= -INTVAL (XEXP (XEXP (addr
, 1), 1));
2419 val
.end
= INTVAL (XEXP (XEXP (addr
, 1), 1));
2420 val
.safe
= REGNO (val
.base
) == STACK_POINTER_REGNUM
;
2425 if (GET_CODE (addr
) == CONST
)
2427 addr
= XEXP (addr
, 0);
2430 if (GET_CODE (addr
) == PLUS
)
2432 if (CONSTANT_P (XEXP (addr
, 0)))
2434 base
= XEXP (addr
, 1);
2435 offset
= XEXP (addr
, 0);
2437 else if (CONSTANT_P (XEXP (addr
, 1)))
2439 base
= XEXP (addr
, 0);
2440 offset
= XEXP (addr
, 1);
2447 offset
= const0_rtx
;
2449 if (GET_CODE (offset
) == CONST
)
2450 offset
= XEXP (offset
, 0);
2451 if (GET_CODE (offset
) == PLUS
)
2453 if (CONST_INT_P (XEXP (offset
, 0)))
2455 base
= gen_rtx_PLUS (GET_MODE (base
), base
, XEXP (offset
, 1));
2456 offset
= XEXP (offset
, 0);
2458 else if (CONST_INT_P (XEXP (offset
, 1)))
2460 base
= gen_rtx_PLUS (GET_MODE (base
), base
, XEXP (offset
, 0));
2461 offset
= XEXP (offset
, 1);
2465 base
= gen_rtx_PLUS (GET_MODE (base
), base
, offset
);
2466 offset
= const0_rtx
;
2469 else if (!CONST_INT_P (offset
))
2471 base
= gen_rtx_PLUS (GET_MODE (base
), base
, offset
);
2472 offset
= const0_rtx
;
2475 if (all_const
&& GET_CODE (base
) == PLUS
)
2476 base
= gen_rtx_CONST (GET_MODE (base
), base
);
2478 gcc_assert (CONST_INT_P (offset
));
2480 val
.start
= INTVAL (offset
);
2481 val
.end
= val
.start
+ GET_MODE_SIZE (GET_MODE (x
));
2488 val
.start
= true_regnum (x
);
2489 if (val
.start
< 0 || val
.start
>= FIRST_PSEUDO_REGISTER
)
2491 /* A pseudo with no hard reg. */
2492 val
.start
= REGNO (x
);
2493 val
.end
= val
.start
+ 1;
2497 val
.end
= end_hard_regno (GET_MODE (x
), val
.start
);
2501 if (!REG_P (SUBREG_REG (x
)))
2502 /* This could be more precise, but it's good enough. */
2503 return decompose (SUBREG_REG (x
));
2505 val
.start
= true_regnum (x
);
2506 if (val
.start
< 0 || val
.start
>= FIRST_PSEUDO_REGISTER
)
2507 return decompose (SUBREG_REG (x
));
2510 val
.end
= val
.start
+ subreg_nregs (x
);
2514 /* This hasn't been assigned yet, so it can't conflict yet. */
2519 gcc_assert (CONSTANT_P (x
));
2526 /* Return 1 if altering Y will not modify the value of X.
2527 Y is also described by YDATA, which should be decompose (Y). */
2530 immune_p (rtx x
, rtx y
, struct decomposition ydata
)
2532 struct decomposition xdata
;
2535 return !refers_to_regno_for_reload_p (ydata
.start
, ydata
.end
, x
, (rtx
*) 0);
2539 gcc_assert (MEM_P (y
));
2540 /* If Y is memory and X is not, Y can't affect X. */
2544 xdata
= decompose (x
);
2546 if (! rtx_equal_p (xdata
.base
, ydata
.base
))
2548 /* If bases are distinct symbolic constants, there is no overlap. */
2549 if (CONSTANT_P (xdata
.base
) && CONSTANT_P (ydata
.base
))
2551 /* Constants and stack slots never overlap. */
2552 if (CONSTANT_P (xdata
.base
)
2553 && (ydata
.base
== frame_pointer_rtx
2554 || ydata
.base
== hard_frame_pointer_rtx
2555 || ydata
.base
== stack_pointer_rtx
))
2557 if (CONSTANT_P (ydata
.base
)
2558 && (xdata
.base
== frame_pointer_rtx
2559 || xdata
.base
== hard_frame_pointer_rtx
2560 || xdata
.base
== stack_pointer_rtx
))
2562 /* If either base is variable, we don't know anything. */
2566 return (xdata
.start
>= ydata
.end
|| ydata
.start
>= xdata
.end
);
2569 /* Similar, but calls decompose. */
2572 safe_from_earlyclobber (rtx op
, rtx clobber
)
2574 struct decomposition early_data
;
2576 early_data
= decompose (clobber
);
2577 return immune_p (op
, clobber
, early_data
);
2580 /* Main entry point of this file: search the body of INSN
2581 for values that need reloading and record them with push_reload.
2582 REPLACE nonzero means record also where the values occur
2583 so that subst_reloads can be used.
2585 IND_LEVELS says how many levels of indirection are supported by this
2586 machine; a value of zero means that a memory reference is not a valid
2589 LIVE_KNOWN says we have valid information about which hard
2590 regs are live at each point in the program; this is true when
2591 we are called from global_alloc but false when stupid register
2592 allocation has been done.
2594 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2595 which is nonnegative if the reg has been commandeered for reloading into.
2596 It is copied into STATIC_RELOAD_REG_P and referenced from there
2597 by various subroutines.
2599 Return TRUE if some operands need to be changed, because of swapping
2600 commutative operands, reg_equiv_address substitution, or whatever. */
2603 find_reloads (rtx_insn
*insn
, int replace
, int ind_levels
, int live_known
,
2604 short *reload_reg_p
)
2606 int insn_code_number
;
2609 /* These start out as the constraints for the insn
2610 and they are chewed up as we consider alternatives. */
2611 const char *constraints
[MAX_RECOG_OPERANDS
];
2612 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2614 enum reg_class preferred_class
[MAX_RECOG_OPERANDS
];
2615 char pref_or_nothing
[MAX_RECOG_OPERANDS
];
2616 /* Nonzero for a MEM operand whose entire address needs a reload.
2617 May be -1 to indicate the entire address may or may not need a reload. */
2618 int address_reloaded
[MAX_RECOG_OPERANDS
];
2619 /* Nonzero for an address operand that needs to be completely reloaded.
2620 May be -1 to indicate the entire operand may or may not need a reload. */
2621 int address_operand_reloaded
[MAX_RECOG_OPERANDS
];
2622 /* Value of enum reload_type to use for operand. */
2623 enum reload_type operand_type
[MAX_RECOG_OPERANDS
];
2624 /* Value of enum reload_type to use within address of operand. */
2625 enum reload_type address_type
[MAX_RECOG_OPERANDS
];
2626 /* Save the usage of each operand. */
2627 enum reload_usage
{ RELOAD_READ
, RELOAD_READ_WRITE
, RELOAD_WRITE
} modified
[MAX_RECOG_OPERANDS
];
2628 int no_input_reloads
= 0, no_output_reloads
= 0;
2630 reg_class_t this_alternative
[MAX_RECOG_OPERANDS
];
2631 char this_alternative_match_win
[MAX_RECOG_OPERANDS
];
2632 char this_alternative_win
[MAX_RECOG_OPERANDS
];
2633 char this_alternative_offmemok
[MAX_RECOG_OPERANDS
];
2634 char this_alternative_earlyclobber
[MAX_RECOG_OPERANDS
];
2635 int this_alternative_matches
[MAX_RECOG_OPERANDS
];
2636 reg_class_t goal_alternative
[MAX_RECOG_OPERANDS
];
2637 int this_alternative_number
;
2638 int goal_alternative_number
= 0;
2639 int operand_reloadnum
[MAX_RECOG_OPERANDS
];
2640 int goal_alternative_matches
[MAX_RECOG_OPERANDS
];
2641 int goal_alternative_matched
[MAX_RECOG_OPERANDS
];
2642 char goal_alternative_match_win
[MAX_RECOG_OPERANDS
];
2643 char goal_alternative_win
[MAX_RECOG_OPERANDS
];
2644 char goal_alternative_offmemok
[MAX_RECOG_OPERANDS
];
2645 char goal_alternative_earlyclobber
[MAX_RECOG_OPERANDS
];
2646 int goal_alternative_swapped
;
2649 char operands_match
[MAX_RECOG_OPERANDS
][MAX_RECOG_OPERANDS
];
2650 rtx substed_operand
[MAX_RECOG_OPERANDS
];
2651 rtx body
= PATTERN (insn
);
2652 rtx set
= single_set (insn
);
2653 int goal_earlyclobber
= 0, this_earlyclobber
;
2654 machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
2660 n_earlyclobbers
= 0;
2661 replace_reloads
= replace
;
2662 hard_regs_live_known
= live_known
;
2663 static_reload_reg_p
= reload_reg_p
;
2665 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2666 neither are insns that SET cc0. Insns that use CC0 are not allowed
2667 to have any input reloads. */
2668 if (JUMP_P (insn
) || CALL_P (insn
))
2669 no_output_reloads
= 1;
2671 if (HAVE_cc0
&& reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
2672 no_input_reloads
= 1;
2673 if (HAVE_cc0
&& reg_set_p (cc0_rtx
, PATTERN (insn
)))
2674 no_output_reloads
= 1;
2676 #ifdef SECONDARY_MEMORY_NEEDED
2677 /* The eliminated forms of any secondary memory locations are per-insn, so
2678 clear them out here. */
2680 if (secondary_memlocs_elim_used
)
2682 memset (secondary_memlocs_elim
, 0,
2683 sizeof (secondary_memlocs_elim
[0]) * secondary_memlocs_elim_used
);
2684 secondary_memlocs_elim_used
= 0;
2688 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2689 is cheap to move between them. If it is not, there may not be an insn
2690 to do the copy, so we may need a reload. */
2691 if (GET_CODE (body
) == SET
2692 && REG_P (SET_DEST (body
))
2693 && REGNO (SET_DEST (body
)) < FIRST_PSEUDO_REGISTER
2694 && REG_P (SET_SRC (body
))
2695 && REGNO (SET_SRC (body
)) < FIRST_PSEUDO_REGISTER
2696 && register_move_cost (GET_MODE (SET_SRC (body
)),
2697 REGNO_REG_CLASS (REGNO (SET_SRC (body
))),
2698 REGNO_REG_CLASS (REGNO (SET_DEST (body
)))) == 2)
2701 extract_insn (insn
);
2703 noperands
= reload_n_operands
= recog_data
.n_operands
;
2704 n_alternatives
= recog_data
.n_alternatives
;
2706 /* Just return "no reloads" if insn has no operands with constraints. */
2707 if (noperands
== 0 || n_alternatives
== 0)
2710 insn_code_number
= INSN_CODE (insn
);
2711 this_insn_is_asm
= insn_code_number
< 0;
2713 memcpy (operand_mode
, recog_data
.operand_mode
,
2714 noperands
* sizeof (machine_mode
));
2715 memcpy (constraints
, recog_data
.constraints
,
2716 noperands
* sizeof (const char *));
2720 /* If we will need to know, later, whether some pair of operands
2721 are the same, we must compare them now and save the result.
2722 Reloading the base and index registers will clobber them
2723 and afterward they will fail to match. */
2725 for (i
= 0; i
< noperands
; i
++)
2731 substed_operand
[i
] = recog_data
.operand
[i
];
2734 modified
[i
] = RELOAD_READ
;
2736 /* Scan this operand's constraint to see if it is an output operand,
2737 an in-out operand, is commutative, or should match another. */
2741 p
+= CONSTRAINT_LEN (c
, p
);
2745 modified
[i
] = RELOAD_WRITE
;
2748 modified
[i
] = RELOAD_READ_WRITE
;
2752 /* The last operand should not be marked commutative. */
2753 gcc_assert (i
!= noperands
- 1);
2755 /* We currently only support one commutative pair of
2756 operands. Some existing asm code currently uses more
2757 than one pair. Previously, that would usually work,
2758 but sometimes it would crash the compiler. We
2759 continue supporting that case as well as we can by
2760 silently ignoring all but the first pair. In the
2761 future we may handle it correctly. */
2762 if (commutative
< 0)
2765 gcc_assert (this_insn_is_asm
);
2768 /* Use of ISDIGIT is tempting here, but it may get expensive because
2769 of locale support we don't want. */
2770 case '0': case '1': case '2': case '3': case '4':
2771 case '5': case '6': case '7': case '8': case '9':
2773 c
= strtoul (p
- 1, &end
, 10);
2776 operands_match
[c
][i
]
2777 = operands_match_p (recog_data
.operand
[c
],
2778 recog_data
.operand
[i
]);
2780 /* An operand may not match itself. */
2781 gcc_assert (c
!= i
);
2783 /* If C can be commuted with C+1, and C might need to match I,
2784 then C+1 might also need to match I. */
2785 if (commutative
>= 0)
2787 if (c
== commutative
|| c
== commutative
+ 1)
2789 int other
= c
+ (c
== commutative
? 1 : -1);
2790 operands_match
[other
][i
]
2791 = operands_match_p (recog_data
.operand
[other
],
2792 recog_data
.operand
[i
]);
2794 if (i
== commutative
|| i
== commutative
+ 1)
2796 int other
= i
+ (i
== commutative
? 1 : -1);
2797 operands_match
[c
][other
]
2798 = operands_match_p (recog_data
.operand
[c
],
2799 recog_data
.operand
[other
]);
2801 /* Note that C is supposed to be less than I.
2802 No need to consider altering both C and I because in
2803 that case we would alter one into the other. */
2810 /* Examine each operand that is a memory reference or memory address
2811 and reload parts of the addresses into index registers.
2812 Also here any references to pseudo regs that didn't get hard regs
2813 but are equivalent to constants get replaced in the insn itself
2814 with those constants. Nobody will ever see them again.
2816 Finally, set up the preferred classes of each operand. */
2818 for (i
= 0; i
< noperands
; i
++)
2820 RTX_CODE code
= GET_CODE (recog_data
.operand
[i
]);
2822 address_reloaded
[i
] = 0;
2823 address_operand_reloaded
[i
] = 0;
2824 operand_type
[i
] = (modified
[i
] == RELOAD_READ
? RELOAD_FOR_INPUT
2825 : modified
[i
] == RELOAD_WRITE
? RELOAD_FOR_OUTPUT
2828 = (modified
[i
] == RELOAD_READ
? RELOAD_FOR_INPUT_ADDRESS
2829 : modified
[i
] == RELOAD_WRITE
? RELOAD_FOR_OUTPUT_ADDRESS
2832 if (*constraints
[i
] == 0)
2833 /* Ignore things like match_operator operands. */
2835 else if (insn_extra_address_constraint
2836 (lookup_constraint (constraints
[i
])))
2838 address_operand_reloaded
[i
]
2839 = find_reloads_address (recog_data
.operand_mode
[i
], (rtx
*) 0,
2840 recog_data
.operand
[i
],
2841 recog_data
.operand_loc
[i
],
2842 i
, operand_type
[i
], ind_levels
, insn
);
2844 /* If we now have a simple operand where we used to have a
2845 PLUS or MULT, re-recognize and try again. */
2846 if ((OBJECT_P (*recog_data
.operand_loc
[i
])
2847 || GET_CODE (*recog_data
.operand_loc
[i
]) == SUBREG
)
2848 && (GET_CODE (recog_data
.operand
[i
]) == MULT
2849 || GET_CODE (recog_data
.operand
[i
]) == PLUS
))
2851 INSN_CODE (insn
) = -1;
2852 retval
= find_reloads (insn
, replace
, ind_levels
, live_known
,
2857 recog_data
.operand
[i
] = *recog_data
.operand_loc
[i
];
2858 substed_operand
[i
] = recog_data
.operand
[i
];
2860 /* Address operands are reloaded in their existing mode,
2861 no matter what is specified in the machine description. */
2862 operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2864 /* If the address is a single CONST_INT pick address mode
2865 instead otherwise we will later not know in which mode
2866 the reload should be performed. */
2867 if (operand_mode
[i
] == VOIDmode
)
2868 operand_mode
[i
] = Pmode
;
2871 else if (code
== MEM
)
2874 = find_reloads_address (GET_MODE (recog_data
.operand
[i
]),
2875 recog_data
.operand_loc
[i
],
2876 XEXP (recog_data
.operand
[i
], 0),
2877 &XEXP (recog_data
.operand
[i
], 0),
2878 i
, address_type
[i
], ind_levels
, insn
);
2879 recog_data
.operand
[i
] = *recog_data
.operand_loc
[i
];
2880 substed_operand
[i
] = recog_data
.operand
[i
];
2882 else if (code
== SUBREG
)
2884 rtx reg
= SUBREG_REG (recog_data
.operand
[i
]);
2886 = find_reloads_toplev (recog_data
.operand
[i
], i
, address_type
[i
],
2889 && &SET_DEST (set
) == recog_data
.operand_loc
[i
],
2891 &address_reloaded
[i
]);
2893 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2894 that didn't get a hard register, emit a USE with a REG_EQUAL
2895 note in front so that we might inherit a previous, possibly
2901 && (GET_MODE_SIZE (GET_MODE (reg
))
2902 >= GET_MODE_SIZE (GET_MODE (op
)))
2903 && reg_equiv_constant (REGNO (reg
)) == 0)
2904 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode
, reg
),
2906 REG_EQUAL
, reg_equiv_memory_loc (REGNO (reg
)));
2908 substed_operand
[i
] = recog_data
.operand
[i
] = op
;
2910 else if (code
== PLUS
|| GET_RTX_CLASS (code
) == RTX_UNARY
)
2911 /* We can get a PLUS as an "operand" as a result of register
2912 elimination. See eliminate_regs and gen_reload. We handle
2913 a unary operator by reloading the operand. */
2914 substed_operand
[i
] = recog_data
.operand
[i
]
2915 = find_reloads_toplev (recog_data
.operand
[i
], i
, address_type
[i
],
2916 ind_levels
, 0, insn
,
2917 &address_reloaded
[i
]);
2918 else if (code
== REG
)
2920 /* This is equivalent to calling find_reloads_toplev.
2921 The code is duplicated for speed.
2922 When we find a pseudo always equivalent to a constant,
2923 we replace it by the constant. We must be sure, however,
2924 that we don't try to replace it in the insn in which it
2926 int regno
= REGNO (recog_data
.operand
[i
]);
2927 if (reg_equiv_constant (regno
) != 0
2928 && (set
== 0 || &SET_DEST (set
) != recog_data
.operand_loc
[i
]))
2930 /* Record the existing mode so that the check if constants are
2931 allowed will work when operand_mode isn't specified. */
2933 if (operand_mode
[i
] == VOIDmode
)
2934 operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2936 substed_operand
[i
] = recog_data
.operand
[i
]
2937 = reg_equiv_constant (regno
);
2939 if (reg_equiv_memory_loc (regno
) != 0
2940 && (reg_equiv_address (regno
) != 0 || num_not_at_initial_offset
))
2941 /* We need not give a valid is_set_dest argument since the case
2942 of a constant equivalence was checked above. */
2943 substed_operand
[i
] = recog_data
.operand
[i
]
2944 = find_reloads_toplev (recog_data
.operand
[i
], i
, address_type
[i
],
2945 ind_levels
, 0, insn
,
2946 &address_reloaded
[i
]);
2948 /* If the operand is still a register (we didn't replace it with an
2949 equivalent), get the preferred class to reload it into. */
2950 code
= GET_CODE (recog_data
.operand
[i
]);
2952 = ((code
== REG
&& REGNO (recog_data
.operand
[i
])
2953 >= FIRST_PSEUDO_REGISTER
)
2954 ? reg_preferred_class (REGNO (recog_data
.operand
[i
]))
2958 && REGNO (recog_data
.operand
[i
]) >= FIRST_PSEUDO_REGISTER
2959 && reg_alternate_class (REGNO (recog_data
.operand
[i
])) == NO_REGS
);
2962 /* If this is simply a copy from operand 1 to operand 0, merge the
2963 preferred classes for the operands. */
2964 if (set
!= 0 && noperands
>= 2 && recog_data
.operand
[0] == SET_DEST (set
)
2965 && recog_data
.operand
[1] == SET_SRC (set
))
2967 preferred_class
[0] = preferred_class
[1]
2968 = reg_class_subunion
[(int) preferred_class
[0]][(int) preferred_class
[1]];
2969 pref_or_nothing
[0] |= pref_or_nothing
[1];
2970 pref_or_nothing
[1] |= pref_or_nothing
[0];
2973 /* Now see what we need for pseudo-regs that didn't get hard regs
2974 or got the wrong kind of hard reg. For this, we must consider
2975 all the operands together against the register constraints. */
2977 best
= MAX_RECOG_OPERANDS
* 2 + 600;
2979 goal_alternative_swapped
= 0;
2981 /* The constraints are made of several alternatives.
2982 Each operand's constraint looks like foo,bar,... with commas
2983 separating the alternatives. The first alternatives for all
2984 operands go together, the second alternatives go together, etc.
2986 First loop over alternatives. */
2988 alternative_mask enabled
= get_enabled_alternatives (insn
);
2989 for (this_alternative_number
= 0;
2990 this_alternative_number
< n_alternatives
;
2991 this_alternative_number
++)
2995 if (!TEST_BIT (enabled
, this_alternative_number
))
2999 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3000 constraints
[i
] = skip_alternative (constraints
[i
]);
3005 /* If insn is commutative (it's safe to exchange a certain pair
3006 of operands) then we need to try each alternative twice, the
3007 second time matching those two operands as if we had
3008 exchanged them. To do this, really exchange them in
3010 for (swapped
= 0; swapped
< (commutative
>= 0 ? 2 : 1); swapped
++)
3012 /* Loop over operands for one constraint alternative. */
3013 /* LOSERS counts those that don't fit this alternative
3014 and would require loading. */
3016 /* BAD is set to 1 if it some operand can't fit this alternative
3017 even after reloading. */
3019 /* REJECT is a count of how undesirable this alternative says it is
3020 if any reloading is required. If the alternative matches exactly
3021 then REJECT is ignored, but otherwise it gets this much
3022 counted against it in addition to the reloading needed. Each
3023 ? counts three times here since we want the disparaging caused by
3024 a bad register class to only count 1/3 as much. */
3029 recog_data
.operand
[commutative
] = substed_operand
[commutative
+ 1];
3030 recog_data
.operand
[commutative
+ 1] = substed_operand
[commutative
];
3031 /* Swap the duplicates too. */
3032 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3033 if (recog_data
.dup_num
[i
] == commutative
3034 || recog_data
.dup_num
[i
] == commutative
+ 1)
3035 *recog_data
.dup_loc
[i
]
3036 = recog_data
.operand
[(int) recog_data
.dup_num
[i
]];
3038 std::swap (preferred_class
[commutative
],
3039 preferred_class
[commutative
+ 1]);
3040 std::swap (pref_or_nothing
[commutative
],
3041 pref_or_nothing
[commutative
+ 1]);
3042 std::swap (address_reloaded
[commutative
],
3043 address_reloaded
[commutative
+ 1]);
3046 this_earlyclobber
= 0;
3048 for (i
= 0; i
< noperands
; i
++)
3050 const char *p
= constraints
[i
];
3055 /* 0 => this operand can be reloaded somehow for this alternative. */
3057 /* 0 => this operand can be reloaded if the alternative allows regs. */
3061 rtx operand
= recog_data
.operand
[i
];
3063 /* Nonzero means this is a MEM that must be reloaded into a reg
3064 regardless of what the constraint says. */
3065 int force_reload
= 0;
3067 /* Nonzero if a constant forced into memory would be OK for this
3070 int earlyclobber
= 0;
3071 enum constraint_num cn
;
3074 /* If the predicate accepts a unary operator, it means that
3075 we need to reload the operand, but do not do this for
3076 match_operator and friends. */
3077 if (UNARY_P (operand
) && *p
!= 0)
3078 operand
= XEXP (operand
, 0);
3080 /* If the operand is a SUBREG, extract
3081 the REG or MEM (or maybe even a constant) within.
3082 (Constants can occur as a result of reg_equiv_constant.) */
3084 while (GET_CODE (operand
) == SUBREG
)
3086 /* Offset only matters when operand is a REG and
3087 it is a hard reg. This is because it is passed
3088 to reg_fits_class_p if it is a REG and all pseudos
3089 return 0 from that function. */
3090 if (REG_P (SUBREG_REG (operand
))
3091 && REGNO (SUBREG_REG (operand
)) < FIRST_PSEUDO_REGISTER
)
3093 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand
)),
3094 GET_MODE (SUBREG_REG (operand
)),
3095 SUBREG_BYTE (operand
),
3096 GET_MODE (operand
)) < 0)
3098 offset
+= subreg_regno_offset (REGNO (SUBREG_REG (operand
)),
3099 GET_MODE (SUBREG_REG (operand
)),
3100 SUBREG_BYTE (operand
),
3101 GET_MODE (operand
));
3103 operand
= SUBREG_REG (operand
);
3104 /* Force reload if this is a constant or PLUS or if there may
3105 be a problem accessing OPERAND in the outer mode. */
3106 if (CONSTANT_P (operand
)
3107 || GET_CODE (operand
) == PLUS
3108 /* We must force a reload of paradoxical SUBREGs
3109 of a MEM because the alignment of the inner value
3110 may not be enough to do the outer reference. On
3111 big-endian machines, it may also reference outside
3114 On machines that extend byte operations and we have a
3115 SUBREG where both the inner and outer modes are no wider
3116 than a word and the inner mode is narrower, is integral,
3117 and gets extended when loaded from memory, combine.c has
3118 made assumptions about the behavior of the machine in such
3119 register access. If the data is, in fact, in memory we
3120 must always load using the size assumed to be in the
3121 register and let the insn do the different-sized
3124 This is doubly true if WORD_REGISTER_OPERATIONS. In
3125 this case eliminate_regs has left non-paradoxical
3126 subregs for push_reload to see. Make sure it does
3127 by forcing the reload.
3129 ??? When is it right at this stage to have a subreg
3130 of a mem that is _not_ to be handled specially? IMO
3131 those should have been reduced to just a mem. */
3132 || ((MEM_P (operand
)
3134 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
))
3135 && (WORD_REGISTER_OPERATIONS
3136 || ((GET_MODE_BITSIZE (GET_MODE (operand
))
3137 < BIGGEST_ALIGNMENT
)
3138 && paradoxical_subreg_p (operand_mode
[i
],
3139 GET_MODE (operand
)))
3141 || ((GET_MODE_SIZE (operand_mode
[i
])
3143 && (GET_MODE_SIZE (GET_MODE (operand
))
3145 && paradoxical_subreg_p (operand_mode
[i
],
3147 && INTEGRAL_MODE_P (GET_MODE (operand
))
3148 && LOAD_EXTEND_OP (GET_MODE (operand
))
3154 this_alternative
[i
] = NO_REGS
;
3155 this_alternative_win
[i
] = 0;
3156 this_alternative_match_win
[i
] = 0;
3157 this_alternative_offmemok
[i
] = 0;
3158 this_alternative_earlyclobber
[i
] = 0;
3159 this_alternative_matches
[i
] = -1;
3161 /* An empty constraint or empty alternative
3162 allows anything which matched the pattern. */
3163 if (*p
== 0 || *p
== ',')
3166 /* Scan this alternative's specs for this operand;
3167 set WIN if the operand fits any letter in this alternative.
3168 Otherwise, clear BADOP if this operand could
3169 fit some letter after reloads,
3170 or set WINREG if this operand could fit after reloads
3171 provided the constraint allows some registers. */
3174 switch ((c
= *p
, len
= CONSTRAINT_LEN (c
, p
)), c
)
3192 /* Ignore rest of this alternative as far as
3193 reloading is concerned. */
3196 while (*p
&& *p
!= ',');
3200 case '0': case '1': case '2': case '3': case '4':
3201 case '5': case '6': case '7': case '8': case '9':
3202 m
= strtoul (p
, &end
, 10);
3206 this_alternative_matches
[i
] = m
;
3207 /* We are supposed to match a previous operand.
3208 If we do, we win if that one did.
3209 If we do not, count both of the operands as losers.
3210 (This is too conservative, since most of the time
3211 only a single reload insn will be needed to make
3212 the two operands win. As a result, this alternative
3213 may be rejected when it is actually desirable.) */
3214 if ((swapped
&& (m
!= commutative
|| i
!= commutative
+ 1))
3215 /* If we are matching as if two operands were swapped,
3216 also pretend that operands_match had been computed
3218 But if I is the second of those and C is the first,
3219 don't exchange them, because operands_match is valid
3220 only on one side of its diagonal. */
3222 [(m
== commutative
|| m
== commutative
+ 1)
3223 ? 2 * commutative
+ 1 - m
: m
]
3224 [(i
== commutative
|| i
== commutative
+ 1)
3225 ? 2 * commutative
+ 1 - i
: i
])
3226 : operands_match
[m
][i
])
3228 /* If we are matching a non-offsettable address where an
3229 offsettable address was expected, then we must reject
3230 this combination, because we can't reload it. */
3231 if (this_alternative_offmemok
[m
]
3232 && MEM_P (recog_data
.operand
[m
])
3233 && this_alternative
[m
] == NO_REGS
3234 && ! this_alternative_win
[m
])
3237 did_match
= this_alternative_win
[m
];
3241 /* Operands don't match. */
3244 /* Retroactively mark the operand we had to match
3245 as a loser, if it wasn't already. */
3246 if (this_alternative_win
[m
])
3248 this_alternative_win
[m
] = 0;
3249 if (this_alternative
[m
] == NO_REGS
)
3251 /* But count the pair only once in the total badness of
3252 this alternative, if the pair can be a dummy reload.
3253 The pointers in operand_loc are not swapped; swap
3254 them by hand if necessary. */
3255 if (swapped
&& i
== commutative
)
3256 loc1
= commutative
+ 1;
3257 else if (swapped
&& i
== commutative
+ 1)
3261 if (swapped
&& m
== commutative
)
3262 loc2
= commutative
+ 1;
3263 else if (swapped
&& m
== commutative
+ 1)
3268 = find_dummy_reload (recog_data
.operand
[i
],
3269 recog_data
.operand
[m
],
3270 recog_data
.operand_loc
[loc1
],
3271 recog_data
.operand_loc
[loc2
],
3272 operand_mode
[i
], operand_mode
[m
],
3273 this_alternative
[m
], -1,
3274 this_alternative_earlyclobber
[m
]);
3279 /* This can be fixed with reloads if the operand
3280 we are supposed to match can be fixed with reloads. */
3282 this_alternative
[i
] = this_alternative
[m
];
3284 /* If we have to reload this operand and some previous
3285 operand also had to match the same thing as this
3286 operand, we don't know how to do that. So reject this
3288 if (! did_match
|| force_reload
)
3289 for (j
= 0; j
< i
; j
++)
3290 if (this_alternative_matches
[j
]
3291 == this_alternative_matches
[i
])
3299 /* All necessary reloads for an address_operand
3300 were handled in find_reloads_address. */
3302 = base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
3308 case TARGET_MEM_CONSTRAINT
:
3313 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3314 && reg_renumber
[REGNO (operand
)] < 0))
3316 if (CONST_POOL_OK_P (operand_mode
[i
], operand
))
3323 && ! address_reloaded
[i
]
3324 && (GET_CODE (XEXP (operand
, 0)) == PRE_DEC
3325 || GET_CODE (XEXP (operand
, 0)) == POST_DEC
))
3331 && ! address_reloaded
[i
]
3332 && (GET_CODE (XEXP (operand
, 0)) == PRE_INC
3333 || GET_CODE (XEXP (operand
, 0)) == POST_INC
))
3337 /* Memory operand whose address is not offsettable. */
3342 && ! (ind_levels
? offsettable_memref_p (operand
)
3343 : offsettable_nonstrict_memref_p (operand
))
3344 /* Certain mem addresses will become offsettable
3345 after they themselves are reloaded. This is important;
3346 we don't want our own handling of unoffsettables
3347 to override the handling of reg_equiv_address. */
3348 && !(REG_P (XEXP (operand
, 0))
3350 || reg_equiv_address (REGNO (XEXP (operand
, 0))) != 0)))
3354 /* Memory operand whose address is offsettable. */
3358 if ((MEM_P (operand
)
3359 /* If IND_LEVELS, find_reloads_address won't reload a
3360 pseudo that didn't get a hard reg, so we have to
3361 reject that case. */
3362 && ((ind_levels
? offsettable_memref_p (operand
)
3363 : offsettable_nonstrict_memref_p (operand
))
3364 /* A reloaded address is offsettable because it is now
3365 just a simple register indirect. */
3366 || address_reloaded
[i
] == 1))
3368 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3369 && reg_renumber
[REGNO (operand
)] < 0
3370 /* If reg_equiv_address is nonzero, we will be
3371 loading it into a register; hence it will be
3372 offsettable, but we cannot say that reg_equiv_mem
3373 is offsettable without checking. */
3374 && ((reg_equiv_mem (REGNO (operand
)) != 0
3375 && offsettable_memref_p (reg_equiv_mem (REGNO (operand
))))
3376 || (reg_equiv_address (REGNO (operand
)) != 0))))
3378 if (CONST_POOL_OK_P (operand_mode
[i
], operand
)
3386 /* Output operand that is stored before the need for the
3387 input operands (and their index registers) is over. */
3388 earlyclobber
= 1, this_earlyclobber
= 1;
3398 /* A PLUS is never a valid operand, but reload can make
3399 it from a register when eliminating registers. */
3400 && GET_CODE (operand
) != PLUS
3401 /* A SCRATCH is not a valid operand. */
3402 && GET_CODE (operand
) != SCRATCH
3403 && (! CONSTANT_P (operand
)
3405 || LEGITIMATE_PIC_OPERAND_P (operand
))
3406 && (GENERAL_REGS
== ALL_REGS
3408 || (REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3409 && reg_renumber
[REGNO (operand
)] < 0)))
3415 cn
= lookup_constraint (p
);
3416 switch (get_constraint_type (cn
))
3419 cl
= reg_class_for_constraint (cn
);
3425 if (CONST_INT_P (operand
)
3426 && (insn_const_int_ok_for_constraint
3427 (INTVAL (operand
), cn
)))
3434 if (constraint_satisfied_p (operand
, cn
))
3436 /* If the address was already reloaded,
3438 else if (MEM_P (operand
) && address_reloaded
[i
] == 1)
3440 /* Likewise if the address will be reloaded because
3441 reg_equiv_address is nonzero. For reg_equiv_mem
3442 we have to check. */
3443 else if (REG_P (operand
)
3444 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3445 && reg_renumber
[REGNO (operand
)] < 0
3446 && ((reg_equiv_mem (REGNO (operand
)) != 0
3447 && (constraint_satisfied_p
3448 (reg_equiv_mem (REGNO (operand
)),
3450 || (reg_equiv_address (REGNO (operand
))
3454 /* If we didn't already win, we can reload
3455 constants via force_const_mem, and other
3456 MEMs by reloading the address like for 'o'. */
3457 if (CONST_POOL_OK_P (operand_mode
[i
], operand
)
3464 case CT_SPECIAL_MEMORY
:
3467 if (constraint_satisfied_p (operand
, cn
))
3469 /* Likewise if the address will be reloaded because
3470 reg_equiv_address is nonzero. For reg_equiv_mem
3471 we have to check. */
3472 else if (REG_P (operand
)
3473 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3474 && reg_renumber
[REGNO (operand
)] < 0
3475 && reg_equiv_mem (REGNO (operand
)) != 0
3476 && (constraint_satisfied_p
3477 (reg_equiv_mem (REGNO (operand
)), cn
)))
3482 if (constraint_satisfied_p (operand
, cn
))
3485 /* If we didn't already win, we can reload
3486 the address into a base register. */
3488 = base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
3494 if (constraint_satisfied_p (operand
, cn
))
3502 = reg_class_subunion
[this_alternative
[i
]][cl
];
3503 if (GET_MODE (operand
) == BLKmode
)
3507 && reg_fits_class_p (operand
, this_alternative
[i
],
3508 offset
, GET_MODE (recog_data
.operand
[i
])))
3512 while ((p
+= len
), c
);
	  if (swapped == (commutative >= 0 ? 1 : 0))
	  /* If this operand could be handled with a reg,
	     and some reg is allowed, then this operand can be handled. */
	  if (winreg && this_alternative[i] != NO_REGS
	      && (win || !class_only_fixed_regs[this_alternative[i]]))
	  /* Record which operands fit this alternative. */
	  this_alternative_earlyclobber[i] = earlyclobber;
	  if (win && ! force_reload)
	    this_alternative_win[i] = 1;
	  else if (did_match && ! force_reload)
	    this_alternative_match_win[i] = 1;
	      int const_to_mem = 0;
	      this_alternative_offmemok[i] = offmemok;
	      /* Alternative loses if it has no regs for a reg operand. */
		  && this_alternative[i] == NO_REGS
		  && this_alternative_matches[i] < 0)
	      /* If this is a constant that is reloaded into the desired
		 class by copying it to memory first, count that as another
		 reload. This is consistent with other code and is
		 required to avoid choosing another alternative when
		 the constant is moved into memory by this function on
		 an early reload pass. Note that the test here is
		 precisely the same as in the code below that calls
	      if (CONST_POOL_OK_P (operand_mode[i], operand)
		  && ((targetm.preferred_reload_class (operand,
						       this_alternative[i])
		      || no_input_reloads))
		  if (this_alternative[i] != NO_REGS)
	      /* Alternative loses if it requires a type of reload not
		 permitted for this insn. We can always reload SCRATCH
		 and objects with a REG_UNUSED note. */
	      if (GET_CODE (operand) != SCRATCH
		  && modified[i] != RELOAD_READ && no_output_reloads
		  && ! find_reg_note (insn, REG_UNUSED, operand))
	      else if (modified[i] != RELOAD_WRITE && no_input_reloads
	      /* If we can't reload this value at all, reject this
		 alternative. Note that we could also lose due to
		 LIMIT_RELOAD_CLASS, but we don't check that
	      if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
		  if (targetm.preferred_reload_class (operand,
						      this_alternative[i])
		  if (operand_type[i] == RELOAD_FOR_OUTPUT
		      && (targetm.preferred_output_reload_class (operand,
								 this_alternative[i])
	      /* We prefer to reload pseudos over reloading other things,
		 since such reloads may be able to be eliminated later.
		 If we are reloading a SCRATCH, we won't be generating any
		 insns, just using a register, so it is also preferred.
		 So bump REJECT in other cases. Don't do this in the
		 case where we are forcing a constant into memory and
		 it will then win since we don't want to have a different
		 alternative match then. */
	      if (! (REG_P (operand)
		     && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
		  && GET_CODE (operand) != SCRATCH
		  && ! (const_to_mem && constmemok))
	      /* Input reloads can be inherited more often than output
		 reloads can be removed, so penalize output reloads. */
	      if (operand_type[i] != RELOAD_FOR_INPUT
		  && GET_CODE (operand) != SCRATCH)
	      /* If this operand is a pseudo register that didn't get
		 a hard reg and this alternative accepts some
		 register, see if the class that we want is a subset
		 of the preferred class for this register. If not,
		 but it intersects that class, use the preferred class
		 instead. If it does not intersect the preferred
		 class, show that usage of this alternative should be
		 discouraged; it will be discouraged more still if the
		 register is `preferred or nothing'. We do this
		 because it increases the chance of reusing our spill
		 register in a later insn and avoiding a pair of
		 memory stores and loads.

		 Don't bother with this if this alternative will
		 accept this operand.

		 Don't do this for a multiword operand, since it is
		 only a small win and has the risk of requiring more
		 spill registers, which could cause a large loss.

		 Don't do this if the preferred class has only one
		 register because we might otherwise exhaust the
	      if (! win && ! did_match
		  && this_alternative[i] != NO_REGS
		  && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
		  && reg_class_size[(int) preferred_class[i]] > 0
		  && ! small_register_class_p (preferred_class[i]))
		  if (! reg_class_subset_p (this_alternative[i],
					    preferred_class[i]))
		      /* Since we don't have a way of forming the intersection,
			 we just do something special if the preferred class
			 is a subset of the class we have; that's the most
			 common case anyway. */
		      if (reg_class_subset_p (preferred_class[i],
					      this_alternative[i]))
			this_alternative[i] = preferred_class[i];
		      reject += (2 + 2 * pref_or_nothing[i]);
      /* Now see if any output operands that are marked "earlyclobber"
	 in this alternative conflict with any input operands
	 or any memory addresses. */
      for (i = 0; i < noperands; i++)
	if (this_alternative_earlyclobber[i]
	    && (this_alternative_win[i] || this_alternative_match_win[i]))
	    struct decomposition early_data;
	    early_data = decompose (recog_data.operand[i]);
	    gcc_assert (modified[i] != RELOAD_READ);
	    if (this_alternative[i] == NO_REGS)
		this_alternative_earlyclobber[i] = 0;
		gcc_assert (this_insn_is_asm);
		error_for_asm (this_insn,
			       "%<&%> constraint used with no register class");
	    for (j = 0; j < noperands; j++)
	      /* Is this an input operand or a memory ref? */
	      if ((MEM_P (recog_data.operand[j])
		   || modified[j] != RELOAD_WRITE)
		  /* Ignore things like match_operator operands. */
		  && !recog_data.is_operator[j]
		  /* Don't count an input operand that is constrained to match
		     the early clobber operand. */
		  && ! (this_alternative_matches[j] == i
			&& rtx_equal_p (recog_data.operand[i],
					recog_data.operand[j]))
		  /* Is it altered by storing the earlyclobber operand? */
		  && !immune_p (recog_data.operand[j], recog_data.operand[i],
		  /* If the output is in a non-empty few-regs class,
		     it's costly to reload it, so reload the input instead. */
		  if (small_register_class_p (this_alternative[i])
		      && (REG_P (recog_data.operand[j])
			  || GET_CODE (recog_data.operand[j]) == SUBREG))
		      this_alternative_win[j] = 0;
		      this_alternative_match_win[j] = 0;
	    /* If an earlyclobber operand conflicts with something,
	       it must be reloaded, so request this and count the cost. */
		this_alternative_win[i] = 0;
		this_alternative_match_win[j] = 0;
		for (j = 0; j < noperands; j++)
		  if (this_alternative_matches[j] == i
		      && this_alternative_match_win[j])
		      this_alternative_win[j] = 0;
		      this_alternative_match_win[j] = 0;
      /* If one alternative accepts all the operands, no reload required,
	 choose that alternative; don't consider the remaining ones. */
	  /* Unswap these so that they are never swapped at `finish'. */
	      recog_data.operand[commutative] = substed_operand[commutative];
	      recog_data.operand[commutative + 1]
		= substed_operand[commutative + 1];
	  for (i = 0; i < noperands; i++)
	      goal_alternative_win[i] = this_alternative_win[i];
	      goal_alternative_match_win[i] = this_alternative_match_win[i];
	      goal_alternative[i] = this_alternative[i];
	      goal_alternative_offmemok[i] = this_alternative_offmemok[i];
	      goal_alternative_matches[i] = this_alternative_matches[i];
	      goal_alternative_earlyclobber[i]
		= this_alternative_earlyclobber[i];
	  goal_alternative_number = this_alternative_number;
	  goal_alternative_swapped = swapped;
	  goal_earlyclobber = this_earlyclobber;
      /* REJECT, set by the ! and ? constraint characters and when a register
	 would be reloaded into a non-preferred class, discourages the use of
	 this alternative for a reload goal. REJECT is incremented by six
	 for each ? and two for each non-preferred class. */
      losers = losers * 6 + reject;
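      /* Illustrative example (not in the original source): with this
	 scoring, an alternative needing one reload (losers == 1) that
	 carries a single `?' (reject == 6) scores 1 * 6 + 6 = 12, the same
	 as an alternative needing two reloads with no `?' (2 * 6 + 0 = 12);
	 each non-preferred class adds only 2, so it mainly breaks ties.  */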
      /* If this alternative can be made to work by reloading,
	 and it needs less reloading than the others checked so far,
	 record it as the chosen goal for reloading. */
	  for (i = 0; i < noperands; i++)
	      goal_alternative[i] = this_alternative[i];
	      goal_alternative_win[i] = this_alternative_win[i];
	      goal_alternative_match_win[i]
		= this_alternative_match_win[i];
	      goal_alternative_offmemok[i]
		= this_alternative_offmemok[i];
	      goal_alternative_matches[i] = this_alternative_matches[i];
	      goal_alternative_earlyclobber[i]
		= this_alternative_earlyclobber[i];
	  goal_alternative_swapped = swapped;
	  goal_alternative_number = this_alternative_number;
	  goal_earlyclobber = this_earlyclobber;
      /* If the commutative operands have been swapped, swap
	 them back in order to check the next alternative. */
	  recog_data.operand[commutative] = substed_operand[commutative];
	  recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
	  /* Unswap the duplicates too. */
	  for (i = 0; i < recog_data.n_dups; i++)
	    if (recog_data.dup_num[i] == commutative
		|| recog_data.dup_num[i] == commutative + 1)
	      *recog_data.dup_loc[i]
		= recog_data.operand[(int) recog_data.dup_num[i]];
	  /* Unswap the operand related information as well. */
	  std::swap (preferred_class[commutative],
		     preferred_class[commutative + 1]);
	  std::swap (pref_or_nothing[commutative],
		     pref_or_nothing[commutative + 1]);
	  std::swap (address_reloaded[commutative],
		     address_reloaded[commutative + 1]);
  /* The operands don't meet the constraints.
     goal_alternative describes the alternative
     that we could reach by reloading the fewest operands.
     Reload so as to fit it. */
  if (best == MAX_RECOG_OPERANDS * 2 + 600)
      /* No alternative works with reloads?? */
      if (insn_code_number >= 0)
	fatal_insn ("unable to generate reloads for:", insn);
      error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
      /* Avoid further trouble with this insn. */
      PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
  /* Jump to `finish' from above if all operands are valid already.
     In that case, goal_alternative_win is all 1. */
  /* Right now, for any pair of operands I and J that are required to match,
     goal_alternative_matches[J] is I.
     Set up goal_alternative_matched as the inverse function:
     goal_alternative_matched[I] = J. */
  for (i = 0; i < noperands; i++)
    goal_alternative_matched[i] = -1;
  for (i = 0; i < noperands; i++)
    if (! goal_alternative_win[i]
	&& goal_alternative_matches[i] >= 0)
      goal_alternative_matched[goal_alternative_matches[i]] = i;
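  /* Illustrative example (not in the original source): if operand 2 is
     constrained to match operand 0 and operand 2 did not already win,
     goal_alternative_matches[2] == 0 here, and the loop above records
     the inverse mapping goal_alternative_matched[0] == 2.  */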
  for (i = 0; i < noperands; i++)
    goal_alternative_win[i] |= goal_alternative_match_win[i];
  /* If the best alternative is with operands 1 and 2 swapped,
     consider them swapped before reporting the reloads. Update the
     operand numbers of any reloads already pushed. */
  if (goal_alternative_swapped)
      std::swap (substed_operand[commutative],
		 substed_operand[commutative + 1]);
      std::swap (recog_data.operand[commutative],
		 recog_data.operand[commutative + 1]);
      std::swap (*recog_data.operand_loc[commutative],
		 *recog_data.operand_loc[commutative + 1]);
      for (i = 0; i < recog_data.n_dups; i++)
	if (recog_data.dup_num[i] == commutative
	    || recog_data.dup_num[i] == commutative + 1)
	  *recog_data.dup_loc[i]
	    = recog_data.operand[(int) recog_data.dup_num[i]];
      for (i = 0; i < n_reloads; i++)
	  if (rld[i].opnum == commutative)
	    rld[i].opnum = commutative + 1;
	  else if (rld[i].opnum == commutative + 1)
	    rld[i].opnum = commutative;
  for (i = 0; i < noperands; i++)
      operand_reloadnum[i] = -1;
      /* If this is an earlyclobber operand, we need to widen the scope.
	 The reload must remain valid from the start of the insn being
	 reloaded until after the operand is stored into its destination.
	 We approximate this with RELOAD_OTHER even though we know that we
	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.

	 One special case that is worth checking is when we have an
	 output that is earlyclobber but isn't used past the insn (typically
	 a SCRATCH). In this case, we only need have the reload live
	 through the insn itself, but not for any of our input or output

	 But we must not accidentally narrow the scope of an existing
	 RELOAD_OTHER reload - leave these alone.

	 In any case, anything needed to address this operand can remain
	 however they were previously categorized. */
      if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
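      /* Illustrative example (not in the original source): an earlyclobber
	 SCRATCH output carrying a REG_UNUSED note only needs the narrower
	 RELOAD_FOR_INSN scope chosen above, while an earlyclobber output
	 that stays live after the insn is widened to RELOAD_OTHER.  */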
  /* Any constants that aren't allowed and can't be reloaded
     into registers are here changed into memory references. */
  for (i = 0; i < noperands; i++)
    if (! goal_alternative_win[i])
	rtx op = recog_data.operand[i];
	rtx subreg = NULL_RTX;
	rtx plus = NULL_RTX;
	machine_mode mode = operand_mode[i];
	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
	   push_reload so we have to let them pass here. */
	if (GET_CODE (op) == SUBREG)
	    op = SUBREG_REG (op);
	    mode = GET_MODE (op);
	if (GET_CODE (op) == PLUS)
	if (CONST_POOL_OK_P (mode, op)
	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
		|| no_input_reloads))
	    int this_address_reloaded;
	    rtx tem = force_const_mem (mode, op);
	    /* If we stripped a SUBREG or a PLUS above add it back. */
	    if (plus != NULL_RTX)
	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
	    if (subreg != NULL_RTX)
	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
	    this_address_reloaded = 0;
	    substed_operand[i] = recog_data.operand[i]
	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
				     0, insn, &this_address_reloaded);
	    /* If the alternative accepts constant pool refs directly
	       there will be no reload needed at all. */
	    if (plus == NULL_RTX
		&& subreg == NULL_RTX
		&& alternative_allows_const_pool_ref (this_address_reloaded != 1
						      ? substed_operand[i]
						      recog_data.constraints[i],
						      goal_alternative_number))
	      goal_alternative_win[i] = 1;
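	    /* Illustrative example (not in the original source): on a target
	       whose move patterns reject a large integer constant, an operand
	       such as (const_int 0x12345678) can be placed in the constant
	       pool by force_const_mem above and then referenced as a MEM of
	       its pool address, which an alternative with a memory constraint
	       may accept with no further reload.  */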
  /* Record the values of the earlyclobber operands for the caller. */
  if (goal_earlyclobber)
    for (i = 0; i < noperands; i++)
      if (goal_alternative_earlyclobber[i])
	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
  /* Now record reloads for all the operands that need them. */
  for (i = 0; i < noperands; i++)
    if (! goal_alternative_win[i])
	/* Operands that match previous ones have already been handled. */
	if (goal_alternative_matches[i] >= 0)
	/* Handle an operand with a nonoffsettable address
	   appearing where an offsettable address will do
	   by reloading the address into a base register.

	   ??? We can also do this when the operand is a register and
	   reg_equiv_mem is not offsettable, but this is a bit tricky,
	   so we don't bother with it. It may not be worth doing. */
	else if (goal_alternative_matched[i] == -1
		 && goal_alternative_offmemok[i]
		 && MEM_P (recog_data.operand[i]))
	    /* If the address to be reloaded is a VOIDmode constant,
	       use the default address mode as mode of the reload register,
	       as would have been done by find_reloads_address. */
	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
	    machine_mode address_mode;
	    address_mode = get_address_mode (recog_data.operand[i]);
	    operand_reloadnum[i]
	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
			     &XEXP (recog_data.operand[i], 0), (rtx *) 0,
			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
			     VOIDmode, 0, 0, i, RELOAD_OTHER);
	    rld[operand_reloadnum[i]].inc
	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
	    /* If this operand is an output, we will have made any
	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
	       now we are treating part of the operand as an input, so
	       we must change these to RELOAD_FOR_OTHER_ADDRESS. */
	    if (modified[i] == RELOAD_WRITE)
		for (j = 0; j < n_reloads; j++)
		    if (rld[j].opnum == i)
			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
			else if (rld[j].when_needed
				 == RELOAD_FOR_OUTADDR_ADDRESS)
			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
	else if (goal_alternative_matched[i] == -1)
	    operand_reloadnum[i]
	      = push_reload ((modified[i] != RELOAD_WRITE
			      ? recog_data.operand[i] : 0),
			     (modified[i] != RELOAD_READ
			      ? recog_data.operand[i] : 0),
			     (modified[i] != RELOAD_WRITE
			      ? recog_data.operand_loc[i] : 0),
			     (modified[i] != RELOAD_READ
			      ? recog_data.operand_loc[i] : 0),
			     (enum reg_class) goal_alternative[i],
			     (modified[i] == RELOAD_WRITE
			      ? VOIDmode : operand_mode[i]),
			     (modified[i] == RELOAD_READ
			      ? VOIDmode : operand_mode[i]),
			     (insn_code_number < 0 ? 0
			      : insn_data[insn_code_number].operand[i].strict_low),
			     0, i, operand_type[i]);
	/* In a matching pair of operands, one must be input only
	   and the other must be output only.
	   Pass the input operand as IN and the other as OUT. */
	else if (modified[i] == RELOAD_READ
		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
	    operand_reloadnum[i]
	      = push_reload (recog_data.operand[i],
			     recog_data.operand[goal_alternative_matched[i]],
			     recog_data.operand_loc[i],
			     recog_data.operand_loc[goal_alternative_matched[i]],
			     (enum reg_class) goal_alternative[i],
			     operand_mode[goal_alternative_matched[i]],
			     0, 0, i, RELOAD_OTHER);
	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
	else if (modified[i] == RELOAD_WRITE
		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
	    operand_reloadnum[goal_alternative_matched[i]]
	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
			     recog_data.operand[i],
			     recog_data.operand_loc[goal_alternative_matched[i]],
			     recog_data.operand_loc[i],
			     (enum reg_class) goal_alternative[i],
			     operand_mode[goal_alternative_matched[i]],
			     0, 0, i, RELOAD_OTHER);
	    operand_reloadnum[i] = output_reloadnum;
	    gcc_assert (insn_code_number < 0);
	    error_for_asm (insn, "inconsistent operand constraints "
	    /* Avoid further trouble with this insn. */
	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
    else if (goal_alternative_matched[i] < 0
	     && goal_alternative_matches[i] < 0
	     && address_operand_reloaded[i] != 1
	/* For each non-matching operand that's a MEM or a pseudo-register
	   that didn't get a hard register, make an optional reload.
	   This may get done even if the insn needs no reloads otherwise. */
	rtx operand = recog_data.operand[i];
	while (GET_CODE (operand) == SUBREG)
	  operand = SUBREG_REG (operand);
	if ((MEM_P (operand)
	     && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
	    /* If this is only for an output, the optional reload would not
	       actually cause us to use a register now, just note that
	       something is stored here. */
	    && (goal_alternative[i] != NO_REGS
		|| modified[i] == RELOAD_WRITE)
	    && ! no_input_reloads
	    /* An optional output reload might allow to delete INSN later.
	       We mustn't make in-out reloads on insns that are not permitted
	       If this is an asm, we can't delete it; we must not even call
	       push_reload for an optional output reload in this case,
	       because we can't be sure that the constraint allows a register,
	       and push_reload verifies the constraints for asms. */
	    && (modified[i] == RELOAD_READ
		|| (! no_output_reloads && ! this_insn_is_asm)))
	  operand_reloadnum[i]
	    = push_reload ((modified[i] != RELOAD_WRITE
			    ? recog_data.operand[i] : 0),
			   (modified[i] != RELOAD_READ
			    ? recog_data.operand[i] : 0),
			   (modified[i] != RELOAD_WRITE
			    ? recog_data.operand_loc[i] : 0),
			   (modified[i] != RELOAD_READ
			    ? recog_data.operand_loc[i] : 0),
			   (enum reg_class) goal_alternative[i],
			   (modified[i] == RELOAD_WRITE
			    ? VOIDmode : operand_mode[i]),
			   (modified[i] == RELOAD_READ
			    ? VOIDmode : operand_mode[i]),
			   (insn_code_number < 0 ? 0
			    : insn_data[insn_code_number].operand[i].strict_low),
			   1, i, operand_type[i]);
	/* If a memory reference remains (either as a MEM or a pseudo that
	   did not get a hard register), yet we can't make an optional
	   reload, check if this is actually a pseudo register reference;
	   we then need to emit a USE and/or a CLOBBER so that reload
	   inheritance will do the right thing. */
		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
		 && reg_renumber[REGNO (operand)] < 0)))
	    operand = *recog_data.operand_loc[i];
	    while (GET_CODE (operand) == SUBREG)
	      operand = SUBREG_REG (operand);
	    if (REG_P (operand))
		if (modified[i] != RELOAD_WRITE)
		  /* We mark the USE with QImode so that we recognize
		     it as one that can be safely deleted at the end
		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
		if (modified[i] != RELOAD_READ)
		  emit_insn_after (gen_clobber (operand), insn);
    else if (goal_alternative_matches[i] >= 0
	     && goal_alternative_win[goal_alternative_matches[i]]
	     && modified[i] == RELOAD_READ
	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
	     && ! no_input_reloads && ! no_output_reloads
	/* Similarly, make an optional reload for a pair of matching
	   objects that are in MEM or a pseudo that didn't get a hard reg. */
	rtx operand = recog_data.operand[i];
	while (GET_CODE (operand) == SUBREG)
	  operand = SUBREG_REG (operand);
	if ((MEM_P (operand)
	     && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
			   recog_data.operand[i],
			   recog_data.operand_loc[goal_alternative_matches[i]],
			   recog_data.operand_loc[i],
			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
			   operand_mode[goal_alternative_matches[i]],
			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
  /* Perform whatever substitutions on the operands we are supposed
     to make due to commutativity or replacement of registers
     with equivalent constants or memory slots. */
  for (i = 0; i < noperands; i++)
      /* We only do this on the last pass through reload, because it is
	 possible for some data (like reg_equiv_address) to be changed during
	 later passes. Moreover, we lose the opportunity to get a useful
	 reload_{in,out}_reg when we do these replacements. */
	  rtx substitution = substed_operand[i];
	  *recog_data.operand_loc[i] = substitution;
	  /* If we're replacing an operand with a LABEL_REF, we need to
	     make sure that there's a REG_LABEL_OPERAND note attached to
	     this instruction. */
	  if (GET_CODE (substitution) == LABEL_REF
	      && !find_reg_note (insn, REG_LABEL_OPERAND,
				 label_ref_label (substitution))
	      /* For a JUMP_P, if it was a branch target it must have
		 already been recorded as such. */
		  || !label_is_jump_target_p (label_ref_label (substitution),
	      add_reg_note (insn, REG_LABEL_OPERAND,
			    label_ref_label (substitution));
	      if (LABEL_P (label_ref_label (substitution)))
		++LABEL_NUSES (label_ref_label (substitution));
      retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
  /* If this insn pattern contains any MATCH_DUP's, make sure that
     they will be substituted if the operands they match are substituted.
     Also do now any substitutions we already did on the operands.

     Don't do this if we aren't making replacements because we might be
     propagating things allocated by frame pointer elimination into places
     it doesn't expect. */
  if (insn_code_number >= 0 && replace)
    for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
	int opno = recog_data.dup_num[i];
	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
  /* This loses because reloading of prior insns can invalidate the equivalence
     (or at least find_equiv_reg isn't smart enough to find it any more),
     causing this insn to need more reload regs than it needed before.
     It may be too late to make the reload regs available.
     Now this optimization is done safely in choose_reload_regs. */
  /* For each reload of a reg into some other class of reg,
     search for an existing equivalent reg (same value now) in the right class.
     We can use it as long as we don't need to change its contents. */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0
	&& REG_P (rld[i].in)
	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
			    static_reload_reg_p, 0, rld[i].inmode);
	/* Prevent generation of insn to load the value
	   because the one we found already has the value. */
	  rld[i].in = rld[i].reg_rtx;
  /* If we detected error and replaced asm instruction by USE, forget about the
  if (GET_CODE (PATTERN (insn)) == USE
      && CONST_INT_P (XEXP (PATTERN (insn), 0)))
  /* Perhaps an output reload can be combined with another
     to reduce needs by one. */
  if (!goal_earlyclobber)
  /* If we have a pair of reloads for parts of an address, they are reloading
     the same object, the operands themselves were not reloaded, and they
     are for two operands that are supposed to match, merge the reloads and
     change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
  for (i = 0; i < n_reloads; i++)
      for (j = i + 1; j < n_reloads; j++)
	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	    && rtx_equal_p (rld[i].in, rld[j].in)
	    && (operand_reloadnum[rld[i].opnum] < 0
		|| rld[operand_reloadnum[rld[i].opnum]].optional)
	    && (operand_reloadnum[rld[j].opnum] < 0
		|| rld[operand_reloadnum[rld[j].opnum]].optional)
	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
		|| (goal_alternative_matches[rld[j].opnum]
	    for (k = 0; k < n_replacements; k++)
	      if (replacements[k].what == j)
		replacements[k].what = i;
	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
  /* Scan all the reloads and update their type.
     If a reload is for the address of an operand and we didn't reload
     that operand, change the type. Similarly, change the operand number
     of a reload when two operands match. If a reload is optional, treat it
     as though the operand isn't reloaded.

     ??? This latter case is somewhat odd because if we do the optional
     reload, it means the object is hanging around. Thus we need only
     do the address reload if the optional reload was NOT done.

     Change secondary reloads to be the address type of their operand, not

     If an operand's reload is now RELOAD_OTHER, change any
     RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
     RELOAD_FOR_OTHER_ADDRESS. */
  for (i = 0; i < n_reloads; i++)
      if (rld[i].secondary_p
	  && rld[i].when_needed == operand_type[rld[i].opnum])
	rld[i].when_needed = address_type[rld[i].opnum];
      if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	  && (operand_reloadnum[rld[i].opnum] < 0
	      || rld[operand_reloadnum[rld[i].opnum]].optional))
	  /* If we have a secondary reload to go along with this reload,
	     change its type to RELOAD_FOR_OPADDR_ADDR. */
	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
	      && rld[i].secondary_in_reload != -1)
	      int secondary_in_reload = rld[i].secondary_in_reload;
	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
	      /* If there's a tertiary reload we have to change it also. */
	      if (secondary_in_reload > 0
		  && rld[secondary_in_reload].secondary_in_reload != -1)
		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
		  = RELOAD_FOR_OPADDR_ADDR;
	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	      && rld[i].secondary_out_reload != -1)
	      int secondary_out_reload = rld[i].secondary_out_reload;
	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
	      /* If there's a tertiary reload we have to change it also. */
	      if (secondary_out_reload
		  && rld[secondary_out_reload].secondary_out_reload != -1)
		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
		  = RELOAD_FOR_OPADDR_ADDR;
	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
      if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
	  && operand_reloadnum[rld[i].opnum] >= 0
	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
      if (goal_alternative_matches[rld[i].opnum] >= 0)
	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
  /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
     If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
     reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.

     choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
     conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
     single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
     However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
     then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
     RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
     This is complicated by the fact that a single operand can have more
     than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
     choose_reload_regs without affecting code quality, and cases that
     actually fail are extremely rare, so it turns out to be better to fix
     the problem here by not generating cases that choose_reload_regs will
  /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
     RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
     We can reduce the register pressure by exploiting that a
     RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
     does not conflict with any of them, if it is only used for the first of
     the RELOAD_FOR_X_ADDRESS reloads. */
    int first_op_addr_num = -2;
    int first_inpaddr_num[MAX_RECOG_OPERANDS];
    int first_outpaddr_num[MAX_RECOG_OPERANDS];
    int need_change = 0;
    /* We use last_op_addr_reload and the contents of the above arrays
       first as flags - -2 means no instance encountered, -1 means exactly
       one instance encountered.
       If more than one instance has been encountered, we store the reload
       number of the first reload of the kind in question; reload numbers
       are known to be non-negative. */
    for (i = 0; i < noperands; i++)
      first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
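    /* Illustrative example (not in the original source): starting from -2,
       the first RELOAD_FOR_INPUT_ADDRESS seen for an operand bumps its
       counter to -1 (exactly one instance); a second one makes the
       pre-increment reach 0, and the counter is then replaced by the reload
       number, so after the backward scan below it holds the number of the
       first such reload.  */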
    for (i = n_reloads - 1; i >= 0; i--)
	switch (rld[i].when_needed)
	  case RELOAD_FOR_OPERAND_ADDRESS:
	    if (++first_op_addr_num >= 0)
		first_op_addr_num = i;
	  case RELOAD_FOR_INPUT_ADDRESS:
	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
		first_inpaddr_num[rld[i].opnum] = i;
	  case RELOAD_FOR_OUTPUT_ADDRESS:
	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
		first_outpaddr_num[rld[i].opnum] = i;
    for (i = 0; i < n_reloads; i++)
	enum reload_type type;
	switch (rld[i].when_needed)
	  case RELOAD_FOR_OPADDR_ADDR:
	    first_num = first_op_addr_num;
	    type = RELOAD_FOR_OPERAND_ADDRESS;
	  case RELOAD_FOR_INPADDR_ADDRESS:
	    first_num = first_inpaddr_num[rld[i].opnum];
	    type = RELOAD_FOR_INPUT_ADDRESS;
	  case RELOAD_FOR_OUTADDR_ADDRESS:
	    first_num = first_outpaddr_num[rld[i].opnum];
	    type = RELOAD_FOR_OUTPUT_ADDRESS;
	else if (i > first_num)
	    rld[i].when_needed = type;
	    /* Check if the only TYPE reload that uses reload I is
	       reload FIRST_NUM. */
	    for (j = n_reloads - 1; j > first_num; j--)
		if (rld[j].when_needed == type
		    && (rld[i].secondary_p
			? rld[j].secondary_in_reload == i
			: reg_mentioned_p (rld[i].in, rld[j].in)))
		    rld[i].when_needed = type;
  /* See if we have any reloads that are now allowed to be merged
     because we've changed when the reload is needed to
     RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
     check for the most common cases. */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].in != 0 && rld[i].out == 0
	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
      for (j = 0; j < n_reloads; j++)
	if (i != j && rld[j].in != 0 && rld[j].out == 0
	    && rld[j].when_needed == rld[i].when_needed
	    && MATCHES (rld[i].in, rld[j].in)
	    && rld[i].rclass == rld[j].rclass
	    && !rld[i].nocombine && !rld[j].nocombine
	    && rld[i].reg_rtx == rld[j].reg_rtx)
	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
	    transfer_replacements (i, j);
  /* If we made any reloads for addresses, see if they violate a
     "no input reloads" requirement for this insn. But loads that we
     do after the insn (such as for output addresses) are fine. */
  if (HAVE_cc0 && no_input_reloads)
    for (i = 0; i < n_reloads; i++)
      gcc_assert (rld[i].in == 0
		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
  /* Compute reload_mode and reload_nregs. */
  for (i = 0; i < n_reloads; i++)
	= (rld[i].inmode == VOIDmode
	   || (GET_MODE_SIZE (rld[i].outmode)
	       > GET_MODE_SIZE (rld[i].inmode)))
	  ? rld[i].outmode : rld[i].inmode;
      rld[i].nregs = ira_reg_class_max_nregs[rld[i].rclass][rld[i].mode];
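      /* Illustrative example (not in the original source): a reload with
	 inmode SImode and outmode DImode picks the wider DImode as
	 rld[i].mode, so on a 32-bit target whose general registers are
	 word-sized the ira_reg_class_max_nregs lookup yields nregs == 2.  */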
  /* Special case a simple move with an input reload and a
     destination of a hard reg, if the hard reg is ok, use it. */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].when_needed == RELOAD_FOR_INPUT
	&& GET_CODE (PATTERN (insn)) == SET
	&& REG_P (SET_DEST (PATTERN (insn)))
	&& (SET_SRC (PATTERN (insn)) == rld[i].in
	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
	rtx dest = SET_DEST (PATTERN (insn));
	unsigned int regno = REGNO (dest);
	if (regno < FIRST_PSEUDO_REGISTER
	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
	    && HARD_REGNO_MODE_OK (regno, rld[i].mode))
	    int nr = hard_regno_nregs[regno][rld[i].mode];
	    for (nri = 1; nri < nr; nri++)
	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
	      rld[i].reg_rtx = dest;
/* Return true if alternative number ALTNUM in constraint-string
   CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
   MEM gives the reference if its address hasn't been fully reloaded,
   otherwise it is NULL. */
alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
				   const char *constraint, int altnum)
  /* Skip alternatives before the one requested. */
      while (*constraint++ != ',')
  /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
     If one of them is present, this alternative accepts the result of
     passing a constant-pool reference through find_reloads_toplev.

     The same is true of extra memory constraints if the address
     was reloaded into a register. However, the target may elect
     to disallow the original constant address, forcing it to be
     reloaded into a register instead. */
  for (; (c = *constraint) && c != ',' && c != '#';
       constraint += CONSTRAINT_LEN (c, constraint))
      enum constraint_num cn = lookup_constraint (constraint);
      if (insn_extra_memory_constraint (cn)
	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
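      /* Illustrative example (not in the original source): for a constraint
	 string such as "=r,r,m" with ALTNUM == 2, the skip loop above
	 consumes the two leading alternatives up to their commas, and the
	 scan then inspects only the 'm' alternative, a memory constraint
	 that accepts a reloaded constant-pool reference.  */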
/* Scan X for memory references and scan the addresses for reloading.
   Also checks for references to "constant" regs that we want to eliminate
   and replaces them with the values they stand for.
   We may alter X destructively if it contains a reference to such.
   If X is just a constant reg, we return the equivalent value

   IND_LEVELS says how many levels of indirect addressing this machine

   OPNUM and TYPE identify the purpose of the reload.

   IS_SET_DEST is true if X is the destination of a SET, which is not
   appropriate to be replaced by a constant.

   INSN, if nonzero, is the insn in which we do the reload. It is used
   to determine if we may generate output reloads, and where to put USEs
   for pseudos that we have to replace with stack slots.

   ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
   result of find_reloads_address. */
find_reloads_toplev (rtx x, int opnum, enum reload_type type,
		     int ind_levels, int is_set_dest, rtx_insn *insn,
		     int *address_reloaded)
  RTX_CODE code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
      /* This code is duplicated for speed in find_reloads. */
      int regno = REGNO (x);
      if (reg_equiv_constant (regno) != 0 && !is_set_dest)
	x = reg_equiv_constant (regno);
      /* This creates (subreg (mem...)) which would cause an unnecessary
	 reload of the mem. */
      else if (reg_equiv_mem (regno) != 0)
	x = reg_equiv_mem (regno);
      else if (reg_equiv_memory_loc (regno)
	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	  rtx mem = make_memloc (x, regno);
	  if (reg_equiv_address (regno)
	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
	      /* If this is not a toplevel operand, find_reloads doesn't see
		 this substitution. We have to emit a USE of the pseudo so
		 that delete_output_reload can see it. */
	      if (replace_reloads && recog_data.operand[opnum] != x)
		/* We mark the USE with QImode so that we recognize it
		   as one that can be safely deleted at the end of
		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
					opnum, type, ind_levels, insn);
	      if (!rtx_equal_p (x, mem))
		push_reg_equiv_alt_mem (regno, x);
	      if (address_reloaded)
		*address_reloaded = i;
      i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
				opnum, type, ind_levels, insn);
      if (address_reloaded)
	*address_reloaded = i;
  if (code == SUBREG && REG_P (SUBREG_REG (x)))
      /* Check for SUBREG containing a REG that's equivalent to a
	 constant. If the constant has a known value, truncate it
	 right now. Similarly if we are extracting a single-word of a
	 multi-word constant. If the constant is symbolic, allow it
	 to be substituted normally. push_reload will strip the
	 subreg later. The constant must not be VOIDmode, because we
	 will lose the mode of the register (this should never happen
	 because one of the cases above should handle it). */
      int regno = REGNO (SUBREG_REG (x));
      if (regno >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[regno] < 0
	  && reg_equiv_constant (regno) != 0)
	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
	  if (CONSTANT_P (tem)
	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
	      tem = force_const_mem (GET_MODE (x), tem);
	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum, type,
	      if (address_reloaded)
		*address_reloaded = i;
      /* If the subreg contains a reg that will be converted to a mem,
	 attempt to convert the whole subreg to a (narrower or wider)
	 memory reference instead. If this succeeds, we're done --
	 otherwise fall through to check whether the inner reg still
	 needs address reloads anyway. */
      if (regno >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_memory_loc (regno) != 0)
	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
					     insn, address_reloaded);
  for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
					    ind_levels, is_set_dest, insn,
	/* If we have replaced a reg with it's equivalent memory loc -
	   that can still be handled here e.g. if it's in a paradoxical
	   subreg - we must make the change in a copy, rather than using
	   a destructive change. This way, find_reloads can still elect
	   not to do the change. */
	if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
	    x = shallow_copy_rtx (x);
	XEXP (x, i) = new_part;
/* Return a mem ref for the memory equivalent of reg REGNO.
   This mem ref is not shared with anything. */
make_memloc (rtx ad, int regno)
  /* We must rerun eliminate_regs, in case the elimination
     offsets have changed. */
    = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
  /* If TEM might contain a pseudo, we must copy it to avoid
     modifying it when we do the substitution for the reload. */
  if (rtx_varies_p (tem, 0))
    tem = copy_rtx (tem);
  tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
  tem = adjust_address_nv (tem, GET_MODE (ad), 0);
  /* Copy the result if it's still the same as the equivalence, to avoid
     modifying it when we do the substitution for the reload. */
  if (tem == reg_equiv_memory_loc (regno))
    tem = copy_rtx (tem);
/* Returns true if AD could be turned into a valid memory reference
   to mode MODE in address space AS by reloading the part pointed to
   by PART into a register. */
maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
				   addr_space_t as, rtx *part)
  rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
  retv = memory_address_addr_space_p (mode, ad, as);
/* Record all reloads needed for handling memory address AD
   which appears in *LOC in a memory reference to mode MODE
   which itself is found in location *MEMREFLOC.
   Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.

   OPNUM and TYPE specify the purpose of this reload.

   IND_LEVELS says how many levels of indirect addressing this machine

   INSN, if nonzero, is the insn in which we do the reload. It is used
   to determine if we may generate output reloads, and where to put USEs
   for pseudos that we have to replace with stack slots.

   Value is one if this address is reloaded or replaced as a whole; it is
   zero if the top level of this address was not reloaded or replaced, and
   it is -1 if it may or may not have been reloaded or replaced.

   Note that there is no verification that the address will be valid after
   this routine does its work. Instead, we rely on the fact that the address
   was valid when reload started. So we need only undo things that reload
   could have broken. These are wrong register types, pseudos not allocated
   to a hard register, and frame pointer elimination. */
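/* Illustrative example (not in the original source): if AD is a pseudo
   that gets replaced wholesale by its stack-slot equivalent, the caller
   sees a return value of 1; if only an inner index register within AD
   needed reloading, the top level is untouched and 0 is returned; paths
   that cannot tell which case applies report -1.  */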
find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
		      rtx *loc, int opnum, enum reload_type type,
		      int ind_levels, rtx_insn *insn)
  addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
    : ADDR_SPACE_GENERIC;
  int removed_and = 0;
  /* If the address is a register, see if it is a legitimate address and
     reload if not. We first handle the cases where we need not reload
     or where we must reload in a non-standard way. */
      if (reg_equiv_constant (regno) != 0)
	  find_reloads_address_part (reg_equiv_constant (regno), loc,
				     base_reg_class (mode, as, MEM, SCRATCH),
				     GET_MODE (ad), opnum, type, ind_levels);
      tem = reg_equiv_memory_loc (regno);
	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
	      tem = make_memloc (ad, regno);
	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
							MEM_ADDR_SPACE (tem)))
		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum,
					ADDR_TYPE (type), ind_levels, insn);
		  if (!rtx_equal_p (tem, orig))
		    push_reg_equiv_alt_mem (regno, tem);
	      /* We can avoid a reload if the register's equivalent memory
		 expression is valid as an indirect memory address.
		 But not all addresses are valid in a mem used as an indirect
		 address: only reg or reg+constant. */
		  && strict_memory_address_addr_space_p (mode, tem, as)
		  && (REG_P (XEXP (tem, 0))
		      || (GET_CODE (XEXP (tem, 0)) == PLUS
			  && REG_P (XEXP (XEXP (tem, 0), 0))
			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
		  /* TEM is not the same as what we'll be replacing the
		     pseudo with after reload, put a USE in front of INSN
		     in the final reload pass. */
		      && num_not_at_initial_offset
		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		      /* We mark the USE with QImode so that we
			 recognize it as one that can be safely
			 deleted at the end of reload. */
		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
		  /* This doesn't really count as replacing the address
		     as a whole, since it is still a memory access. */
      /* The only remaining case where we can avoid a reload is if this is a
	 hard register that is valid as a base register and which is not the
	 subject of a CLOBBER in this insn. */
      else if (regno < FIRST_PSEUDO_REGISTER
	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
      /* If we do not have one of the cases above, we must do the reload. */
      push_reload (ad, NULL_RTX, loc, (rtx *) 0,
		   base_reg_class (mode, as, MEM, SCRATCH),
		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
  if (strict_memory_address_addr_space_p (mode, ad, as))
      /* The address appears valid, so reloads are not needed.
	 But the address may contain an eliminable register.
	 This can happen because a machine with indirect addressing
	 may consider a pseudo register by itself a valid address even when
	 it has failed to get a hard reg.
	 So do a tree-walk to find and eliminate all such regs. */
      /* But first quickly dispose of a common case. */
      if (GET_CODE (ad) == PLUS
	  && CONST_INT_P (XEXP (ad, 1))
	  && REG_P (XEXP (ad, 0))
	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
      subst_reg_equivs_changed = 0;
      *loc = subst_reg_equivs (ad, insn);
      if (! subst_reg_equivs_changed)
      /* Check result for validity after substitution. */
      if (strict_memory_address_addr_space_p (mode, ad, as))
#ifdef LEGITIMIZE_RELOAD_ADDRESS
  if (memrefloc && ADDR_SPACE_GENERIC_P (as))
      LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
	  *memrefloc = copy_rtx (*memrefloc);
	  XEXP (*memrefloc, 0) = ad;
	  move_replacements (&ad, &XEXP (*memrefloc, 0));
  /* The address is not valid. We have to figure out why. First see if
     we have an outer AND and remove it if so. Then analyze what's inside. */
  if (GET_CODE (ad) == AND)
      loc = &XEXP (ad, 0);
  /* One possibility for why the address is invalid is that it is itself
     a MEM. This can happen when the frame pointer is being eliminated, a
     pseudo is not allocated to a hard register, and the offset between the
     frame and stack pointers is not its initial value. In that case the
     pseudo will have been replaced by a MEM referring to the
      /* First ensure that the address in this MEM is valid. Then, unless
	 indirect addresses are valid, reload the MEM into a register. */
      find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
			    opnum, ADDR_TYPE (type),
			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
      /* If tem was changed, then we must create a new memory reference to
	 hold it and store it back into memrefloc. */
      if (tem != ad && memrefloc)
	  *memrefloc = copy_rtx (*memrefloc);
	  copy_replacements (tem, XEXP (*memrefloc, 0));
	  loc = &XEXP (*memrefloc, 0);
	  loc = &XEXP (*loc, 0);
      /* Check similar cases as for indirect addresses as above except
	 that we can allow pseudos and a MEM since they should have been
	 taken care of above. */
	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
	  || MEM_P (XEXP (tem, 0))
	  || ! (REG_P (XEXP (tem, 0))
		|| (GET_CODE (XEXP (tem, 0)) == PLUS
		    && REG_P (XEXP (XEXP (tem, 0), 0))
		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
	  /* Must use TEM here, not AD, since it is the one that will
	     have any subexpressions reloaded, if needed. */
	  push_reload (tem, NULL_RTX, loc, (rtx *) 0,
		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
      return ! removed_and;
  /* If we have address of a stack slot but it's not valid because the
     displacement is too large, compute the sum in a register.
     Handle all base registers here, not just fp/ap/sp, because on some
     targets (namely SH) we can also get too large displacements from
     big-endian corrections. */
  else if (GET_CODE (ad) == PLUS
	   && REG_P (XEXP (ad, 0))
	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
	   && CONST_INT_P (XEXP (ad, 1))
	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
	       /* Similarly, if we were to reload the base register and the
		  mem+offset address is still invalid, then we want to reload
		  the whole address, not just the base register. */
	       || ! maybe_memory_address_addr_space_p
		      (mode, ad, as, &(XEXP (ad, 0)))))
      /* Unshare the MEM rtx so we can safely alter it. */
	  *memrefloc = copy_rtx (*memrefloc);
	  loc = &XEXP (*memrefloc, 0);
	  loc = &XEXP (*loc, 0);
      if (double_reg_address_ok[mode]
	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
	  /* Unshare the sum as well. */
	  *loc = ad = copy_rtx (ad);
	  /* Reload the displacement into an index reg.
	     We assume the frame pointer or arg pointer is a base reg. */
	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
	  /* If the sum of two regs is not necessarily valid,
	     reload the sum into a base reg.
	     That will at least work. */
	  find_reloads_address_part (ad, loc,
				     base_reg_class (mode, as, MEM, SCRATCH),
				     GET_MODE (ad), opnum, type, ind_levels);
      return ! removed_and;
  /* If we have an indexed stack slot, there are three possible reasons why
     it might be invalid: The index might need to be reloaded, the address
     might have been made by frame pointer elimination and hence have a
     constant out of range, or both reasons might apply.

     We can easily check for an index needing reload, but even if that is the
     case, we might also have an invalid constant. To avoid making the
     conservative assumption and requiring two reloads, we see if this address
     is valid when not interpreted strictly. If it is, the only problem is
     that the index needs a reload and find_reloads_address_1 will take care

     Handle all base registers here, not just fp/ap/sp, because on some
     targets (namely SPARC) we can also get invalid addresses from preventive
     subreg big-endian corrections made by find_reloads_toplev. We
     can also get expressions involving LO_SUM (rather than PLUS) from
     find_reloads_subreg_address.

     If we decide to do something, it must be that `double_reg_address_ok'
     is true. We generate a reload of the base register + constant and
     rework the sum so that the reload register will be added to the index.
     This is safe because we know the address isn't shared.

     We check for the base register as both the first and second operand of
     the innermost PLUS and/or LO_SUM. */
  for (op_index = 0; op_index < 2; ++op_index)
      rtx operand, addend;
      enum rtx_code inner_code;
      if (GET_CODE (ad) != PLUS)
      inner_code = GET_CODE (XEXP (ad, 0));
      if (!(GET_CODE (ad) == PLUS
	    && CONST_INT_P (XEXP (ad, 1))
	    && (inner_code == PLUS || inner_code == LO_SUM)))
      operand = XEXP (XEXP (ad, 0), op_index);
      if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
      addend = XEXP (XEXP (ad, 0), 1 - op_index);
      if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
	   || operand == frame_pointer_rtx
	   || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	       && operand == hard_frame_pointer_rtx)
	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	       && operand == arg_pointer_rtx)
	   || operand == stack_pointer_rtx)
	  && ! maybe_memory_address_addr_space_p
		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
	    offset_reg = plus_constant (GET_MODE (ad), operand,
					INTVAL (XEXP (ad, 1)));
	  /* Form the adjusted address. */
	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
	    ad = gen_rtx_PLUS (GET_MODE (ad),
			       op_index == 0 ? offset_reg : addend,
			       op_index == 0 ? addend : offset_reg);
	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
				 op_index == 0 ? offset_reg : addend,
				 op_index == 0 ? addend : offset_reg);
	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
	  find_reloads_address_part (XEXP (ad, op_index),
				     &XEXP (ad, op_index), cls,
				     GET_MODE (ad), opnum, type, ind_levels);
	  find_reloads_address_1 (mode, as,
				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
				  GET_CODE (XEXP (ad, op_index)),
				  &XEXP (ad, 1 - op_index), opnum,
  /* See if address becomes valid when an eliminable register
     in a sum is replaced. */
  if (GET_CODE (ad) == PLUS)
    tem = subst_indexed_address (ad);
  if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
      /* Ok, we win that way. Replace any additional eliminable
      subst_reg_equivs_changed = 0;
      tem = subst_reg_equivs (tem, insn);
      /* Make sure that didn't make the address invalid again. */
      if (! subst_reg_equivs_changed
	  || strict_memory_address_addr_space_p (mode, tem, as))
  /* If constants aren't valid addresses, reload the constant address
  if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
      machine_mode address_mode = GET_MODE (ad);
      if (address_mode == VOIDmode)
	address_mode = targetm.addr_space.address_mode (as);
      /* If AD is an address in the constant pool, the MEM rtx may be shared.
	 Unshare it so we can safely alter it. */
      if (memrefloc && GET_CODE (ad) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (ad))
	  *memrefloc = copy_rtx (*memrefloc);
	  loc = &XEXP (*memrefloc, 0);
	  loc = &XEXP (*loc, 0);
      find_reloads_address_part (ad, loc,
				 base_reg_class (mode, as, MEM, SCRATCH),
				 address_mode, opnum, type, ind_levels);
      return ! removed_and;
  return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
				 opnum, type, ind_levels, insn);
/* Find all pseudo regs appearing in AD
   that are eliminable in favor of equivalent values
   and do not have hard regs; replace them by their equivalents.
   INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
   front of it for pseudos that we have to replace with stack slots.  */

subst_reg_equivs (rtx ad, rtx_insn *insn)
  RTX_CODE code = GET_CODE (ad);

      int regno = REGNO (ad);

      if (reg_equiv_constant (regno) != 0)
	  subst_reg_equivs_changed = 1;
	  return reg_equiv_constant (regno);
      if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
	  rtx mem = make_memloc (ad, regno);
	  if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
	      subst_reg_equivs_changed = 1;
	      /* We mark the USE with QImode so that we recognize it
		 as one that can be safely deleted at the end of
		 reload.  */
	      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),

      /* Quickly dispose of a common case.  */
      if (XEXP (ad, 0) == frame_pointer_rtx
	  && CONST_INT_P (XEXP (ad, 1)))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
/* Compute the sum of X and Y, making canonicalizations assumed in an
   address, namely: sum constant integers, surround the sum of two
   constants with a CONST, put the constant as the second operand, and
   group the constant on the outermost sum.

   This routine assumes both inputs are already in canonical form.  */

form_sum (machine_mode mode, rtx x, rtx y)
  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);

  if (CONST_INT_P (x))
    return plus_constant (mode, y, INTVAL (x));
  else if (CONST_INT_P (y))
    return plus_constant (mode, x, INTVAL (y));
  else if (CONSTANT_P (x))
    tem = x, x = y, y = tem;

  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
    return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));

  /* Note that if the operands of Y are specified in the opposite
     order in the recursive calls below, infinite recursion will occur.  */
  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
    return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));

  /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
     constant will have been placed second.  */
  if (CONSTANT_P (x) && CONSTANT_P (y))
      if (GET_CODE (x) == CONST)
      if (GET_CODE (y) == CONST)

      return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));

  return gen_rtx_PLUS (mode, x, y);
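
/* For illustration (hypothetical operands, not from the original sources):

       form_sum (m, (plus (reg R) (const_int 4)), (const_int 8))
	 => (plus (reg R) (const_int 12))

       form_sum (m, (reg R1), (plus (reg R2) (const_int 4)))
	 => (plus (plus (reg R1) (reg R2)) (const_int 4))

   i.e. integer constants are folded together and pushed to the outermost
   sum, as described in the comment above.  */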
/* If ADDR is a sum containing a pseudo register that should be
   replaced with a constant (from reg_equiv_constant),
   return the result of doing so, and also apply the associative
   law so that the result is more likely to be a valid address.
   (But it is not guaranteed to be one.)

   Note that at most one register is replaced, even if more are
   replaceable.  Also, we try to put the result into a canonical form
   so it is more likely to be a valid address.

   In all other cases, return ADDR.  */

subst_indexed_address (rtx addr)
  rtx op0 = 0, op1 = 0, op2 = 0;

  if (GET_CODE (addr) == PLUS)
      /* Try to find a register to replace.  */
      op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[regno] < 0
	  && reg_equiv_constant (regno) != 0)
	op0 = reg_equiv_constant (regno);
      else if (REG_P (op1)
	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
	       && reg_renumber[regno] < 0
	       && reg_equiv_constant (regno) != 0)
	op1 = reg_equiv_constant (regno);
      else if (GET_CODE (op0) == PLUS
	       && (tem = subst_indexed_address (op0)) != op0)
      else if (GET_CODE (op1) == PLUS
	       && (tem = subst_indexed_address (op1)) != op1)

      /* Pick out up to three things to add.  */
      if (GET_CODE (op1) == PLUS)
	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
      else if (GET_CODE (op0) == PLUS)
	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

      /* Compute the sum.  */
	op1 = form_sum (GET_MODE (addr), op1, op2);
	op0 = form_sum (GET_MODE (addr), op0, op1);
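
/* A worked example for illustration only (the register numbers are made
   up): assume pseudo 65 got no hard register and reg_equiv_constant (65)
   is (const_int 8).  Then

       subst_indexed_address ((plus (plus (reg 65) (reg 2)) (const_int 4)))

   first replaces the pseudo in the inner sum, giving
   (plus (reg 2) (const_int 8)), and then regroups the three addends with
   form_sum, yielding (plus (reg 2) (const_int 12)).  */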
/* Update the REG_INC notes for an insn.  It updates all REG_INC
   notes for the instruction which refer to REGNO to refer
   to the reload number.

   INSN is the insn for which any REG_INC notes need updating.

   REGNO is the register number which has been reloaded.

   RELOADNUM is the reload number.  */

update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
		       int reloadnum ATTRIBUTE_UNUSED)
  for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (int) REGNO (XEXP (link, 0)) == regno)
      push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
/* Record the pseudo registers we must reload into hard registers in a
   subexpression of a would-be memory address, X referring to a value
   in mode MODE.  (This function is not called if the address we find
   is strictly valid.)

   CONTEXT = 1 means we are considering regs as index regs,
   = 0 means we are considering them as base regs.
   OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
   or an autoinc code.
   If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
   is the code of the index part of the address.  Otherwise, pass SCRATCH
   for this argument.
   OPNUM and TYPE specify the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads.

   We return nonzero if X, as a whole, is reloaded or replaced.  */

/* Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.
   Also, this is not fully machine-customizable; it works for machines
   such as VAXen and 68000's and 32000's, but other possible machines
   could have addressing modes that this does not handle right.
   If you add push_reload calls here, you need to make sure gen_reload
   handles those cases gracefully.  */
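
/* Rough illustration of the CONTEXT/INDEX_CODE conventions above (the
   register numbers are hypothetical): when the PLUS handling below sees
   (plus (reg 1) (reg 2)) and only (reg 1) is usable as a base register,
   (reg 1) is treated with CONTEXT == 0 (base) and (reg 2) with
   CONTEXT == 1 (index), and each operand is reloaded only if its current
   hard register is not valid for that role.  */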
find_reloads_address_1 (machine_mode mode, addr_space_t as,
			rtx x, int context,
			enum rtx_code outer_code, enum rtx_code index_code,
			rtx *loc, int opnum, enum reload_type type,
			int ind_levels, rtx_insn *insn)
#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
  ((CONTEXT) == 0							\
   ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
   : REGNO_OK_FOR_INDEX_P (REGNO))

  enum reg_class context_reg_class;
  RTX_CODE code = GET_CODE (x);
  bool reloaded_inner_of_autoinc = false;

  if (context == 1)
    context_reg_class = INDEX_REG_CLASS;
  else
    context_reg_class = base_reg_class (mode, as, outer_code, index_code);
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;

	if (GET_CODE (op0) == SUBREG)
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
	      op0 = gen_rtx_REG (word_mode,
				 (REGNO (op0) +
				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
						       GET_MODE (SUBREG_REG (orig_op0)),
						       SUBREG_BYTE (orig_op0),
						       GET_MODE (orig_op0))));

	if (GET_CODE (op1) == SUBREG)
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
	      /* ??? Why is this given op1's mode and above for
		 ??? op0 SUBREGs we use word_mode?  */
	      op1 = gen_rtx_REG (GET_MODE (op1),
				 (REGNO (op1) +
				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
						       GET_MODE (SUBREG_REG (orig_op1)),
						       SUBREG_BYTE (orig_op1),
						       GET_MODE (orig_op1))));
	/* Plus in the index register may be created only as a result of
	   register rematerialization for an expression like &localvar*4.
	   Reload it.  It may be possible to combine the displacement on the
	   outer level, but it is probably not worthwhile to do so.  */
	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
				  opnum, ADDR_TYPE (type), ind_levels, insn);
	    push_reload (*loc, NULL_RTX, loc, (rtx *) 0,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
				    &XEXP (x, 0), opnum, type, ind_levels,
	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
				    &XEXP (x, 1), opnum, type, ind_levels,

	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
				    &XEXP (x, 0), opnum, type, ind_levels,
	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
				    &XEXP (x, 1), opnum, type, ind_levels,

	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
				  &XEXP (x, 1), opnum, type, ind_levels,

	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
				  &XEXP (x, 0), opnum, type, ind_levels,

	else if (code0 == REG && code1 == REG)
	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
				      &XEXP (x, 1), opnum, type, ind_levels,
	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
				      &XEXP (x, 0), opnum, type, ind_levels,
	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
				      &XEXP (x, 0), opnum, type, ind_levels,
	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
				      &XEXP (x, 1), opnum, type, ind_levels,
		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
					&XEXP (x, 0), opnum, type, ind_levels,
		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
					&XEXP (x, 1), opnum, type, ind_levels,

	else if (code0 == REG)
	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
				    &XEXP (x, 0), opnum, type, ind_levels,
	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
				    &XEXP (x, 1), opnum, type, ind_levels,

	else if (code1 == REG)
	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
				    &XEXP (x, 1), opnum, type, ind_levels,
	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
				    &XEXP (x, 0), opnum, type, ind_levels,
	rtx op0 = XEXP (x, 0);
	rtx op1 = XEXP (x, 1);
	enum rtx_code index_code;

	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)

	/* Currently, we only support {PRE,POST}_MODIFY constructs
	   where a base register is {inc,dec}remented by the contents
	   of another register or by a constant value.  Thus, these
	   operands must match.  */
	gcc_assert (op0 == XEXP (op1, 0));

	/* Require index register (or constant).  Let's just handle the
	   register case in the meantime... If the target allows
	   auto-modify by a constant then we could try replacing a pseudo
	   register with its equivalent constant where applicable.

	   We also handle the case where the register was eliminated
	   resulting in a PLUS subexpression.

	   If we later decide to reload the whole PRE_MODIFY or
	   POST_MODIFY, inc_for_reload might clobber the reload register
	   before reading the index.  The index register might therefore
	   need to live longer than a TYPE reload normally would, so be
	   conservative and class it as RELOAD_OTHER.  */
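
	/* For illustration (hypothetical registers): the constructs handled
	   here look like

	       (pre_modify (reg B) (plus (reg B) (reg I)))
	       (post_modify (reg B) (plus (reg B) (const_int N)))

	   where the base register B of the outer rtx must reappear as the
	   first operand of the inner PLUS (or MINUS), as asserted above.  */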
	if ((REG_P (XEXP (op1, 1))
	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
	    || GET_CODE (XEXP (op1, 1)) == PLUS)
	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
				  &XEXP (op1, 1), opnum, RELOAD_OTHER,

	gcc_assert (REG_P (XEXP (op1, 0)));

	regno = REGNO (XEXP (op1, 0));
	index_code = GET_CODE (XEXP (op1, 1));

	/* A register that is incremented cannot be constant!  */
	gcc_assert (regno < FIRST_PSEUDO_REGISTER
		    || reg_equiv_constant (regno) == 0);

	/* Handle a register that is equivalent to a memory location
	   which cannot be addressed directly.  */
	if (reg_equiv_memory_loc (regno) != 0
	    && (reg_equiv_address (regno) != 0
		|| num_not_at_initial_offset))
	    rtx tem = make_memloc (XEXP (x, 0), regno);

	    if (reg_equiv_address (regno)
		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		/* First reload the memory location's address.
		   We can't use ADDR_TYPE (type) here, because we need to
		   write back the value after reading it, hence we actually
		   need two registers.  */
		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
				      &XEXP (tem, 0), opnum,

		if (!rtx_equal_p (tem, orig))
		  push_reg_equiv_alt_mem (regno, tem);

		/* Then reload the memory location into a base
		   register.  */
		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
					 base_reg_class (mode, as,
					 GET_MODE (x), GET_MODE (x), 0,
					 0, opnum, RELOAD_OTHER);

		update_auto_inc_notes (this_insn, regno, reloadnum);

	if (reg_renumber[regno] >= 0)
	  regno = reg_renumber[regno];

	/* We require a base register here...  */
	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
				     &XEXP (op1, 0), &XEXP (x, 0),
				     base_reg_class (mode, as,
				     GET_MODE (x), GET_MODE (x), 0, 0,
				     opnum, RELOAD_OTHER);

	    update_auto_inc_notes (this_insn, regno, reloadnum);
      if (REG_P (XEXP (x, 0)))
	  int regno = REGNO (XEXP (x, 0));

	  /* A register that is incremented cannot be constant!  */
	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
		      || reg_equiv_constant (regno) == 0);

	  /* Handle a register that is equivalent to a memory location
	     which cannot be addressed directly.  */
	  if (reg_equiv_memory_loc (regno) != 0
	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	      rtx tem = make_memloc (XEXP (x, 0), regno);
	      if (reg_equiv_address (regno)
		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		  /* First reload the memory location's address.
		     We can't use ADDR_TYPE (type) here, because we need to
		     write back the value after reading it, hence we actually
		     need two registers.  */
		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum, type,
		  reloaded_inner_of_autoinc = true;
		  if (!rtx_equal_p (tem, orig))
		    push_reg_equiv_alt_mem (regno, tem);
		  /* Put this inside a new increment-expression.  */
		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
		  /* Proceed to reload that, as if it contained a register.  */

	  /* If we have a hard register that is ok in this incdec context,
	     don't make a reload.  If the register isn't nice enough for
	     autoincdec, we can reload it.  But, if an autoincrement of a
	     register that we here verified as playing nice, still outside
	     isn't "valid", it must be that no autoincrement is "valid".
	     If that is true and something made an autoincrement anyway,
	     this must be a special context where one is allowed.
	     (For example, a "push" instruction.)
	     We can't improve this address, so leave it alone.  */

	  /* Otherwise, reload the autoincrement into a suitable hard reg
	     and record how much to increment by.  */
	  if (reg_renumber[regno] >= 0)
	    regno = reg_renumber[regno];
	  if (regno >= FIRST_PSEUDO_REGISTER
	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,

	      /* If we can output the register afterwards, do so, this
		 saves the extra update.
		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
		 CALL_INSN - and it does not set CC0.
		 But don't do this if we cannot directly address the
		 memory location, since this will make it harder to
		 reuse address reloads, and increases register pressure.
		 Also don't do this if we can probably update x directly.  */
	      rtx equiv = (MEM_P (XEXP (x, 0))
			   ? XEXP (x, 0)
			   : reg_equiv_mem (regno));
	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
	      if (insn && NONJUMP_INSN_P (insn)
		  && ! sets_cc0_p (PATTERN (insn))
		  && (regno < FIRST_PSEUDO_REGISTER
		      && memory_operand (equiv, GET_MODE (equiv))
		      && ! (icode != CODE_FOR_nothing
			    && insn_operand_matches (icode, 0, equiv)
			    && insn_operand_matches (icode, 1, equiv))))
		  /* Using RELOAD_OTHER means we emit this and the reload we
		     made earlier in the wrong order.  */
		  && !reloaded_inner_of_autoinc)

		  /* We use the original pseudo for loc, so that
		     emit_reload_insns() knows which pseudo this
		     reload refers to and updates the pseudo rtx, not
		     its equivalent memory location, as well as the
		     corresponding entry in reg_last_reload_reg.  */
		  loc = &XEXP (x_orig, 0);

		    = push_reload (x, x, loc, loc,
				   GET_MODE (x), GET_MODE (x), 0, 0,
				   opnum, RELOAD_OTHER);

		    = push_reload (x, x, loc, (rtx *) 0,
				   GET_MODE (x), GET_MODE (x), 0, 0,

		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));

	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
      /* Look for parts to reload in the inner expression and reload them
	 too, in addition to this operation.  Reloading all inner parts in
	 addition to this one shouldn't be necessary, but at this point,
	 we don't know if we can possibly omit any part that *can* be
	 reloaded.  Targets that are better off reloading just either part
	 (or perhaps even a different part of an outer expression), should
	 define LEGITIMIZE_RELOAD_ADDRESS.  */
      find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
			      context, code, SCRATCH, &XEXP (x, 0), opnum,
			      type, ind_levels, insn);
      push_reload (x, NULL_RTX, loc, (rtx *) 0,
		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);

      /* This is probably the result of a substitution, by eliminate_regs, of
	 an equivalent address for a pseudo that was not allocated to a hard
	 register.  Verify that the specified address is valid and reload it
	 into a register.

	 Since we know we are going to reload this item, don't decrement for
	 the indirection level.

	 Note that this is actually conservative: it would be slightly more
	 efficient to use the value of SPILL_INDIRECT_LEVELS from
	 reload1.c here.  */

      find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
			    opnum, ADDR_TYPE (type), ind_levels, insn);
      push_reload (*loc, NULL_RTX, loc, (rtx *) 0,
		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	int regno = REGNO (x);

	if (reg_equiv_constant (regno) != 0)
	    find_reloads_address_part (reg_equiv_constant (regno), loc,
				       GET_MODE (x), opnum, type, ind_levels);

#if 0 /* This might screw code in reload1.c to delete prior output-reload
	 that feeds this insn.  */
	if (reg_equiv_mem (regno) != 0)
	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx *) 0,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);

	if (reg_equiv_memory_loc (regno)
	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	    rtx tem = make_memloc (x, regno);
	    if (reg_equiv_address (regno) != 0
		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
		if (!rtx_equal_p (x, tem))
		  push_reg_equiv_alt_mem (regno, x);

	if (reg_renumber[regno] >= 0)
	  regno = reg_renumber[regno];

	if (regno >= FIRST_PSEUDO_REGISTER
	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
	    push_reload (x, NULL_RTX, loc, (rtx *) 0,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);

	/* If a register appearing in an address is the subject of a CLOBBER
	   in this insn, reload it into some other register to be safe.
	   The CLOBBER is supposed to make the register unavailable
	   from before this insn to after it.  */
	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
	    push_reload (x, NULL_RTX, loc, (rtx *) 0,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
      if (REG_P (SUBREG_REG (x)))
	  /* If this is a SUBREG of a hard register and the resulting register
	     is of the wrong class, reload the whole SUBREG.  This avoids
	     needless copies if SUBREG_REG is multi-word.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);

	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
		  push_reload (x, NULL_RTX, loc, (rtx *) 0,
			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);

	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
	     is larger than the class size, then reload the whole SUBREG.  */
	      enum reg_class rclass = context_reg_class;
	      if (ira_reg_class_max_nregs[rclass][GET_MODE (SUBREG_REG (x))]
		  > reg_class_size[(int) rclass])
		  /* If the inner register will be replaced by a memory
		     reference, we can do this only if we can replace the
		     whole subreg by a (narrower) memory reference.  If
		     this is not possible, fall through and reload just
		     the inner register (including address reloads).  */
		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
		      rtx tem = find_reloads_subreg_address (x, opnum,
			  push_reload (tem, NULL_RTX, loc, (rtx *) 0, rclass,
				       GET_MODE (tem), VOIDmode, 0, 0,
		      push_reload (x, NULL_RTX, loc, (rtx *) 0, rclass,
				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	const char *fmt = GET_RTX_FORMAT (code);

	for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	    /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
	       we get here.  */
	    find_reloads_address_1 (mode, as, XEXP (x, i), context,
				    code, SCRATCH, &XEXP (x, i),
				    opnum, type, ind_levels, insn);

#undef REG_OK_FOR_CONTEXT
/* X, which is found at *LOC, is a part of an address that needs to be
   reloaded into a register of class RCLASS.  If X is a constant, or if
   X is a PLUS that contains a constant, check that the constant is a
   legitimate operand and that we are supposed to be able to load
   it into the register.

   If not, force the constant into memory and reload the MEM instead.

   MODE is the mode to use, in case X is an integer constant.

   OPNUM and TYPE describe the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.  */

find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
			   machine_mode mode, int opnum,
			   enum reload_type type, int ind_levels)
      && (!targetm.legitimate_constant_p (mode, x)
	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
      x = force_const_mem (mode, x);
      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
			    opnum, type, ind_levels, 0);

  else if (GET_CODE (x) == PLUS
	   && CONSTANT_P (XEXP (x, 1))
	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
      tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
      x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
      find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
			    opnum, type, ind_levels, 0);

  push_reload (x, NULL_RTX, loc, (rtx *) 0, rclass,
	       mode, VOIDmode, 0, 0, opnum, type);
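
/* A hedged example of the transformation above (the constant is chosen for
   illustration and the behavior is target-dependent): on a machine where a
   symbolic constant is not a legitimate operand for RCLASS, an address part
   such as (symbol_ref "x") is replaced by a reference to its constant-pool
   copy produced by force_const_mem, the address of that MEM is processed by
   find_reloads_address, and the resulting MEM is then reloaded into a
   register of class RCLASS.  */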
/* X, a subreg of a pseudo, is a part of an address that needs to be
   reloaded, and the pseudo is equivalent to a memory location.

   Attempt to replace the whole subreg by a (possibly narrower or wider)
   memory reference.  If this is possible, return this new memory
   reference, and push all required address reloads.  Otherwise,
   return NULL.

   OPNUM and TYPE identify the purpose of the reload.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine where to put USEs for pseudos that we have to replace with
   stack slots.  */

find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
			     int ind_levels, rtx_insn *insn,
			     int *address_reloaded)
  machine_mode outer_mode = GET_MODE (x);
  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  int regno = REGNO (SUBREG_REG (x));

  gcc_assert (reg_equiv_memory_loc (regno) != 0);

  /* We cannot replace the subreg with a modified memory reference if:

     - we have a paradoxical subreg that implicitly acts as a zero or
       sign extension operation due to LOAD_EXTEND_OP;

     - we have a subreg that is implicitly supposed to act on the full
       register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);

     - the address of the equivalent memory location is mode-dependent; or

     - we have a paradoxical subreg and the resulting memory is not
       sufficiently aligned to allow access in the wider mode.

     In addition, we choose not to perform the replacement for *any*
     paradoxical subreg, even if it were possible in principle.  This
     is to avoid generating wider memory references than necessary.

     This corresponds to how previous versions of reload used to handle
     paradoxical subregs where no address reload was required.  */
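
  /* Illustrative sketch (the pseudo number and address are hypothetical):
     if pseudo 65 is equivalent to (mem:SI (plus (reg fp) (const_int -8))),
     then a non-paradoxical use such as (subreg:HI (reg:SI 65) 0) can be
     replaced by the narrower memory reference that simplify_subreg produces
     for that location, with any required address reloads pushed below.  */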
  if (paradoxical_subreg_p (x))

  if (WORD_REGISTER_OPERATIONS
      && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
      && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
	  == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))

  /* Since we don't attempt to handle paradoxical subregs, we can just
     call into simplify_subreg, which will handle all remaining checks
     for us.  */
  orig = make_memloc (SUBREG_REG (x), regno);
  offset = SUBREG_BYTE (x);
  tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
  if (!tem || !MEM_P (tem))

  /* Now push all required address reloads, if any.  */
  reloaded = find_reloads_address (GET_MODE (tem), &tem,
				   XEXP (tem, 0), &XEXP (tem, 0),
				   opnum, type, ind_levels, insn);
  /* ??? Do we need to handle nonzero offsets somehow?  */
  if (!offset && !rtx_equal_p (tem, orig))
    push_reg_equiv_alt_mem (regno, tem);

  /* For some processors an address may be valid in the original mode but
     not in a smaller mode.  For example, ARM accepts a scaled index register
     in SImode but not in HImode.  Note that this is only a problem if the
     address in reg_equiv_mem is already invalid in the new mode; other
     cases would be fixed by find_reloads_address as usual.

     ??? We attempt to handle such cases here by doing an additional reload
     of the full address after the usual processing by find_reloads_address.
     Note that this may not work in the general case, but it seems to cover
     the cases where this situation currently occurs.  A more general fix
     might be to reload the *value* instead of the address, but this would
     not be expected by the callers of this routine as-is.

     If find_reloads_address already completely replaced the address, there
     is nothing further to do.  */
      && reg_equiv_mem (regno) != 0
      && !strict_memory_address_addr_space_p
	   (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
	    MEM_ADDR_SPACE (reg_equiv_mem (regno))))
      push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx *) 0,
		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);

  /* If this is not a toplevel operand, find_reloads doesn't see this
     substitution.  We have to emit a USE of the pseudo so that
     delete_output_reload can see it.  */
  if (replace_reloads && recog_data.operand[opnum] != x)
    /* We mark the USE with QImode so that we recognize it as one that
       can be safely deleted at the end of reload.  */
    PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),

  if (address_reloaded)
    *address_reloaded = reloaded;
/* Substitute into the current INSN the registers into which we have reloaded
   the things that need reloading.  The array `replacements'
   contains the locations of all pointers that must be changed
   and says what to replace them with.

   Return the rtx that X translates into; usually X, but modified.  */

subst_reloads (rtx_insn *insn)
  for (i = 0; i < n_replacements; i++)
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;

	  /* This checking takes a very long time on some platforms
	     causing the gcc.c-torture/compile/limits-fnargs.c test
	     to time out during testing.  See PR 31850.

	     Internal consistency test.  Check that we don't modify
	     anything in the equivalence arrays.  Whenever something from
	     those arrays needs to be reloaded, it must be unshared before
	     being substituted into; the equivalence must not be modified.
	     Otherwise, if the equivalence is used after that, it will
	     have been modified, and the thing substituted (probably a
	     register) is likely overwritten and not a usable equivalence.  */
	  for (check_regno = 0; check_regno < max_regno; check_regno++)
#define CHECK_MODF(ARRAY) \
	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
			  || !loc_mentioned_in_p (r->where, \
						  (*reg_equivs)[check_regno].ARRAY))

	      CHECK_MODF (constant);
	      CHECK_MODF (memory_loc);
	      CHECK_MODF (address);
#endif /* DEBUG_RELOAD */

	  /* If we're replacing a LABEL_REF with a register, there must
	     already be an indication (to e.g. flow) which label this
	     register refers to.  */
	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
		      || find_reg_note (insn,
					XEXP (*r->where, 0))
		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));

	  /* Encapsulate RELOADREG so its machine mode matches what
	     used to be there.  Note that gen_lowpart_common will
	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
	     will always be a REG here.  */
	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  *r->where = reloadreg;
	/* If reload got no reg and isn't optional, something's wrong.  */
	gcc_assert (rld[r->what].optional);
/* Make a copy of any replacements being done into X and move those
   copies to locations in Y, a copy of X.  */

copy_replacements (rtx x, rtx y)
  copy_replacements_1 (&x, &y, n_replacements);

copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
  struct replacement *r;

  for (j = 0; j < orig_replacements; j++)
    if (replacements[j].where == px)
	r = &replacements[n_replacements++];
	r->what = replacements[j].what;
	r->mode = replacements[j].mode;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i); --j >= 0; )
	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),

/* Change any replacements being done to *X to be done to *Y.  */

move_replacements (rtx *x, rtx *y)
  for (i = 0; i < n_replacements; i++)
    if (replacements[i].where == x)
      replacements[i].where = y;
/* If LOC was scheduled to be replaced by something, return the replacement.
   Otherwise, return *LOC.  */

find_replacement (rtx *loc)
  struct replacement *r;

  for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
      rtx reloadreg = rld[r->what].reg_rtx;

      if (reloadreg && r->where == loc)
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
      else if (reloadreg && GET_CODE (*loc) == SUBREG
	       && r->where == &SUBREG_REG (*loc))
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
				      GET_MODE (SUBREG_REG (*loc)),
				      SUBREG_BYTE (*loc));

  /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
     what's inside and make a new rtl if so.  */
  if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
      || GET_CODE (*loc) == MULT)
      rtx x = find_replacement (&XEXP (*loc, 0));
      rtx y = find_replacement (&XEXP (*loc, 1));

      if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
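
/* For illustration (register numbers made up): if a replacement is scheduled
   for the first operand of *LOC == (plus (reg 65) (const_int 4)) and that
   reload was assigned hard register R, find_replacement returns a fresh
   (plus (reg R) (const_int 4)) rather than modifying *LOC in place.  */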
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into (except for earlyclobber operands).

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.

   This is similar to refers_to_regno_p in rtlanal.c except that we
   look at equivalences for pseudos that didn't get hard registers.  */

refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
  code = GET_CODE (x);

      /* If this is a pseudo, a hard register must not have been allocated.
	 X must therefore either be a constant or be in memory.  */
      if (r >= FIRST_PSEUDO_REGISTER)
	  if (reg_equiv_memory_loc (r))
	    return refers_to_regno_for_reload_p (regno, endregno,
						 reg_equiv_memory_loc (r),

	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));

      return (endregno > r
	      && regno < r + (r < FIRST_PSEUDO_REGISTER
			      ? hard_regno_nregs[r][GET_MODE (x)]

      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? subreg_nregs (x) : 1);

	  return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && REG_P (SUBREG_REG (SET_DEST (x)))
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_for_reload_p (regno, endregno,
						SUBREG_REG (SET_DEST (x)),
	      /* If the output is an earlyclobber operand, this is
		 a conflict.  */
	      || ((!REG_P (SET_DEST (x))
		   || earlyclobber_operand_p (SET_DEST (x)))
		  && refers_to_regno_for_reload_p (regno, endregno,
						   SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	    if (refers_to_regno_for_reload_p (regno, endregno,
      else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_for_reload_p (regno, endregno,
						 XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).

   This function is similar to reg_overlap_mentioned_p in rtlanal.c except
   that we look at equivalences for pseudos that didn't get hard registers.  */

reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
  int regno, endregno;

  /* Overly conservative.  */
  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)

  /* If either argument is a constant, then modifying X cannot affect IN.  */
  if (CONSTANT_P (x) || CONSTANT_P (in))
  else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    return refers_to_mem_for_reload_p (in);
  else if (GET_CODE (x) == SUBREG)
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				      GET_MODE (SUBREG_REG (x)),

      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? subreg_nregs (x) : 1);

      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx *) 0);

      /* If this is a pseudo, it must not have been assigned a hard register.
	 Therefore, it must either be in memory or be a constant.  */

      if (regno >= FIRST_PSEUDO_REGISTER)
	  if (reg_equiv_memory_loc (regno))
	    return refers_to_mem_for_reload_p (in);
	  gcc_assert (reg_equiv_constant (regno));

      endregno = END_REGNO (x);

      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx *) 0);
    return refers_to_mem_for_reload_p (in);
  else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
	   || GET_CODE (x) == CC0)
    return reg_mentioned_p (x, in);

      gcc_assert (GET_CODE (x) == PLUS);

      /* We actually want to know if X is mentioned somewhere inside IN.
	 We must not say that (plus (sp) (const_int 124)) is in
	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
      else if (GET_CODE (in) == PLUS)
	return (rtx_equal_p (x, in)
		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
      else
	return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
		|| reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
/* Return nonzero if anything in X contains a MEM.  Look also for pseudo
   registers.  */

refers_to_mem_for_reload_p (rtx x)
    return (REGNO (x) >= FIRST_PSEUDO_REGISTER
	    && reg_equiv_memory_loc (REGNO (x)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
	&& (MEM_P (XEXP (x, i))
	    || refers_to_mem_for_reload_p (XEXP (x, i))))
/* Check the insns before INSN to see if there is a suitable register
   containing the same value as GOAL.
   If OTHER is -1, look for a register in class RCLASS.
   Otherwise, just see if register number OTHER shares GOAL's value.

   Return an rtx for the register found, or zero if none is found.

   If RELOAD_REG_P is (short *)1,
   we reject any hard reg that appears in reload_reg_rtx
   because such a hard reg is also needed coming into this insn.

   If RELOAD_REG_P is any other nonzero value,
   it is a vector indexed by hard reg number
   and we reject any hard reg whose element in the vector is nonnegative
   as well as any that appears in reload_reg_rtx.

   If GOAL is zero, then GOALREG is a register number; we look
   for an equivalent for that register.

   MODE is the machine mode of the value we want an equivalence for.
   If GOAL is nonzero and not VOIDmode, then it must have mode MODE.

   This function is used by jump.c as well as in the reload pass.

   If GOAL is the sum of the stack pointer and a constant, we treat it
   as if it were a constant except that sp is required to be unchanging.  */
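
/* A hedged illustration of a typical hit (the hard register number is made
   up): if an earlier insn was (set (reg 3) (mem:SI X)) and GOAL is that
   same (mem:SI X), the scan below can return (reg 3), provided reg 3 is
   acceptable for RCLASS (or equals OTHER) and neither reg 3, the memory
   location, nor any register used in X's address is altered between that
   insn and INSN.  */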
find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
		short *reload_reg_p, int goalreg, machine_mode mode)
  rtx goaltry, valtry, value;
  int goal_mem_addr_varies = 0;
  int need_stable_sp = 0;

  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
      /* An address with side effects must be reexecuted.  */
  else if (CONSTANT_P (goal))
  else if (GET_CODE (goal) == PLUS
	   && XEXP (goal, 0) == stack_pointer_rtx
	   && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
	   && XEXP (goal, 0) == frame_pointer_rtx
	   && CONSTANT_P (XEXP (goal, 1)))
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */

      if (p && DEBUG_INSN_P (p))

      if (p == 0 || LABEL_P (p)
	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))

      /* Don't reuse register contents from before a setjmp-type
	 function call; on the second return (from the longjmp) it
	 might have been clobbered by a later reuse.  It doesn't
	 seem worthwhile to actually go and see if it is actually
	 reused even if that information would be readily available;
	 just don't reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))

      if (NONJUMP_INSN_P (p)
	  /* If we don't want spill regs ...  */
	  && (! (reload_reg_p != 0
		 && reload_reg_p != (short *) HOST_WIDE_INT_1)
	      /* ... then ignore insns introduced by reload; they aren't
		 useful and can cause results in reload_as_needed to be
		 different from what they were when calculating the need for
		 spills.  If we notice an input-reload insn here, we will
		 reject it below, but it might hide a usable equivalent.
		 That makes bad code.  It may even fail: perhaps no reg was
		 spilled for this insn because it was assumed we would find
		 one.  */
	      || INSN_UID (p) < reload_first_uid))
	  pat = single_set (p);

	  /* First check for something that sets some reg equal to GOAL.  */
	       && true_regnum (SET_SRC (pat)) == regno
	       && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
	       && true_regnum (SET_DEST (pat)) == regno
	       && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
	       (goal_const && rtx_equal_p (SET_SRC (pat), goal)
		/* When looking for stack pointer + const,
		   make sure we don't use a stack adjust.  */
		&& !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
		&& (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
	       && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
	       && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
	       && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
	       && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
	       /* If we are looking for a constant,
		  and something equivalent to that constant was copied
		  into a reg, we can use that reg.  */
	       || (goal_const && REG_NOTES (p) != 0
		   && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
		   && ((rtx_equal_p (XEXP (tem, 0), goal)
			   = true_regnum (valtry = SET_DEST (pat))) >= 0)
		       || (REG_P (SET_DEST (pat))
			   && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
			   && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
			   && CONST_INT_P (goal)
			      = operand_subword (XEXP (tem, 0), 0, 0,
			   && rtx_equal_p (goal, goaltry)
			      = operand_subword (SET_DEST (pat), 0, 0,
			   && (valueno = true_regnum (valtry)) >= 0)))
	       || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
		   && REG_P (SET_DEST (pat))
		   && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
		   && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
		   && CONST_INT_P (goal)
		   && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
		   && rtx_equal_p (goal, goaltry)
		      = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
		   && (valueno = true_regnum (valtry)) >= 0)))
	      if (valueno != other)
	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  valuenregs = hard_regno_nregs[valueno][mode];

  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
      for (i = 0; i < valuenregs; ++i)
	if (reload_reg_p[valueno + i] >= 0)

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
      for (i = 0; i < n_reloads; i++)
	if (rld[i].reg_rtx != 0 && rld[i].in)
	    int regno1 = REGNO (rld[i].reg_rtx);
	    int nregs1 = hard_regno_nregs[regno1]
				 [GET_MODE (rld[i].reg_rtx)];
	    if (regno1 < valueno + valuenregs
		&& regno1 + nregs1 > valueno)

    /* We must treat frame pointer as varying here,
       since it can vary--in a nonlocal goto as generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  */

	  /* Don't trust the conversion past a function call
	     if either of the two is in a call-clobbered register, or memory.  */
	      if (goal_mem || need_stable_sp)

	      if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
		for (i = 0; i < nregs; ++i)
		  if (call_used_regs[regno + i]
		      || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))

	      if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
		for (i = 0; i < valuenregs; ++i)
		  if (call_used_regs[valueno + i]
		      || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))

	  /* Watch out for unspec_volatile, and volatile asms.  */
	  if (volatile_insn_p (pat))

	  /* If this insn P stores in either GOAL or VALUE, return 0.
	     If GOAL is a memory ref and this insn writes memory, return 0.
	     If GOAL is a memory ref and its address is not constant,
	     and this insn P changes a register used in GOAL, return 0.  */

	  if (GET_CODE (pat) == COND_EXEC)
	    pat = COND_EXEC_CODE (pat);
	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
	      rtx dest = SET_DEST (pat);
	      while (GET_CODE (dest) == SUBREG
		     || GET_CODE (dest) == ZERO_EXTRACT
		     || GET_CODE (dest) == STRICT_LOW_PART)
		dest = XEXP (dest, 0);

		  int xregno = REGNO (dest);
		  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
		    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
		  if (xregno < regno + nregs && xregno + xnregs > regno)
		  if (xregno < valueno + valuenregs
		      && xregno + xnregs > valueno)
		  if (goal_mem_addr_varies
		      && reg_overlap_mentioned_for_reload_p (dest, goal))
		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
	      else if (goal_mem && MEM_P (dest)
		       && ! push_operand (dest, GET_MODE (dest)))
	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
		       && reg_equiv_memory_loc (regno) != 0)
	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))

	  else if (GET_CODE (pat) == PARALLEL)
	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
		  rtx v1 = XVECEXP (pat, 0, i);
		  if (GET_CODE (v1) == COND_EXEC)
		    v1 = COND_EXEC_CODE (v1);
		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
		      rtx dest = SET_DEST (v1);
		      while (GET_CODE (dest) == SUBREG
			     || GET_CODE (dest) == ZERO_EXTRACT
			     || GET_CODE (dest) == STRICT_LOW_PART)
			dest = XEXP (dest, 0);

			  int xregno = REGNO (dest);
			  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
			    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
			  if (xregno < regno + nregs
			      && xregno + xnregs > regno)
			  if (xregno < valueno + valuenregs
			      && xregno + xnregs > valueno)
			  if (goal_mem_addr_varies
			      && reg_overlap_mentioned_for_reload_p (dest,
			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
		      else if (goal_mem && MEM_P (dest)
			       && ! push_operand (dest, GET_MODE (dest)))
		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
			       && reg_equiv_memory_loc (regno) != 0)
		      else if (need_stable_sp
			       && push_operand (dest, GET_MODE (dest)))

	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
		   link = XEXP (link, 1))
		  pat = XEXP (link, 0);
		  if (GET_CODE (pat) == CLOBBER)
		      rtx dest = SET_DEST (pat);

			  int xregno = REGNO (dest);
			    = hard_regno_nregs[xregno][GET_MODE (dest)];

			  if (xregno < regno + nregs
			      && xregno + xnregs > regno)
			  else if (xregno < valueno + valuenregs
				   && xregno + xnregs > valueno)
			  else if (goal_mem_addr_varies
				   && reg_overlap_mentioned_for_reload_p (dest,

		      else if (goal_mem && MEM_P (dest)
			       && ! push_operand (dest, GET_MODE (dest)))
		      else if (need_stable_sp
			       && push_operand (dest, GET_MODE (dest)))
	  /* If this insn auto-increments or auto-decrements
	     either regno or valueno, return 0 now.
	     If GOAL is a memory ref and its address is not constant,
	     and this insn P increments a register used in GOAL, return 0.  */

	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
	      if (REG_NOTE_KIND (link) == REG_INC
		  && REG_P (XEXP (link, 0)))
		  int incno = REGNO (XEXP (link, 0));
		  if (incno < regno + nregs && incno >= regno)
		  if (incno < valueno + valuenregs && incno >= valueno)
		  if (goal_mem_addr_varies
		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

find_inc_amount (rtx x, rtx inced)
  enum rtx_code code = GET_CODE (x);

      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
	   || GET_CODE (addr) == POST_DEC
	   || GET_CODE (addr) == PRE_INC
	   || GET_CODE (addr) == POST_INC)
	  && XEXP (addr, 0) == inced)
	return GET_MODE_SIZE (GET_MODE (x));
      else if ((GET_CODE (addr) == PRE_MODIFY
		|| GET_CODE (addr) == POST_MODIFY)
	       && GET_CODE (XEXP (addr, 1)) == PLUS
	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
	       && XEXP (addr, 0) == inced
	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
	  return i < 0 ? -i : i;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  int tem = find_inc_amount (XEXP (x, i), inced);
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      int tem = find_inc_amount (XVECEXP (x, i, j), inced);
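
/* For illustration (register B is hypothetical): given a pattern containing
   (mem:SI (post_inc:SI (reg B))), find_inc_amount (pattern, (reg B)) returns
   GET_MODE_SIZE (SImode); for a memory address of the form
   (pre_modify (reg B) (plus (reg B) (const_int -16))) it returns 16, since
   the reported amount is always positive.  */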
/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
	unsigned int test = (int) REGNO (XEXP (link, 0));
	if (test >= regno && test < endregno)
/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      int i = XVECLEN (PATTERN (insn), 0) - 1;

	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  if ((GET_CODE (elt) == CLOBBER
	       || (sets == 1 && GET_CODE (elt) == SET))
	      && REG_P (XEXP (elt, 0)))
	      unsigned int test = REGNO (XEXP (elt, 0));

	      if (test >= regno && test < endregno)
	      && reg_inc_found_and_valid_p (regno, endregno, elt))
/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */

reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
  if (GET_MODE (reloadreg) == mode)

  regno = REGNO (reloadreg);

  if (REG_WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
	     - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
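
/* Example (hypothetical hard register numbers): if RELOADREG is a DImode
   value occupying the register pair starting at hard register 8 and MODE is
   SImode, the result is (reg:SI 8) on a !REG_WORDS_BIG_ENDIAN target, while
   with REG_WORDS_BIG_ENDIAN the low part lives in the last register of the
   pair, so the offset 2 - 1 = 1 is added and the result is (reg:SI 9)
   (assuming DImode needs two registers and SImode one).  */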
static const char *const reload_when_needed_name[] =
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_FOR_OTHER_ADDRESS"

/* These functions are used to print the variables set by 'find_reloads' */

debug_reload_to_stream (FILE *f)
  for (r = 0; r < n_reloads; r++)
      fprintf (f, "Reload %d: ", r);

	  fprintf (f, "reload_in (%s) = ",
		   GET_MODE_NAME (rld[r].inmode));
	  print_inline_rtx (f, rld[r].in, 24);
	  fprintf (f, "\n\t");

      if (rld[r].out != 0)
	  fprintf (f, "reload_out (%s) = ",
		   GET_MODE_NAME (rld[r].outmode));
	  print_inline_rtx (f, rld[r].out, 24);
	  fprintf (f, "\n\t");

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
	       reload_when_needed_name[(int) rld[r].when_needed],

      if (rld[r].optional)
	fprintf (f, ", optional");

      if (rld[r].nongroup)
	fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
	fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
	fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
	fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
	  fprintf (f, "\n\treload_in_reg: ");
	  print_inline_rtx (f, rld[r].in_reg, 24);

      if (rld[r].out_reg != 0)
	  fprintf (f, "\n\treload_out_reg: ");
	  print_inline_rtx (f, rld[r].out_reg, 24);

      if (rld[r].reg_rtx != 0)
	  fprintf (f, "\n\treload_reg_rtx: ");
	  print_inline_rtx (f, rld[r].reg_rtx, 24);

      if (rld[r].secondary_in_reload != -1)
	  fprintf (f, "%ssecondary_in_reload = %d",
		   prefix, rld[r].secondary_in_reload);

      if (rld[r].secondary_out_reload != -1)
	fprintf (f, "%ssecondary_out_reload = %d\n",
		 prefix, rld[r].secondary_out_reload);

      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
	fprintf (f, "%ssecondary_in_icode = %s", prefix,
		 insn_data[rld[r].secondary_in_icode].name);

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
	fprintf (f, "%ssecondary_out_icode = %s", prefix,
		 insn_data[rld[r].secondary_out_icode].name);

  debug_reload_to_stream (stderr);