/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains subroutines used only from the file reload1.c.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally record the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.

   NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better that way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

1 happens every time find_reloads is called.
2 happens only when REPLACE is 1, which is only when
actually doing the reloads, not when just counting them.

   Using a reload register for several reloads in one insn:

   When an insn has reloads, it is considered as having three parts:
   the input reloads, the insn itself after reloading, and the output reloads.
   Reloads of values used in memory addresses are often needed for only one part.

   When this is so, reload_when_needed records which part needs the reload.
   Two reloads for different parts of the insn can share the same reload
   register.

   When a reload is used for addresses in multiple parts, or when it is
   an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
   a register with any other reload.  */
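
/* Illustrative sketch (not part of the original source): the calling
   sequence described above, roughly as reload1.c is expected to drive it.
   "choose_hard_reg_for" is a hypothetical stand-in for the caller's
   register-selection logic; find_reloads, subst_reloads, n_reloads and
   rld[] are the entities defined in this file and reload.h.  */
#if 0
static void
reload_one_insn_sketch (rtx_insn *insn, short *reload_reg_p)
{
  /* 1. Record the reloads this insn needs; a nonzero second argument also
	records the locations where the reloaded values appear.  */
  find_reloads (insn, 1, /*ind_levels=*/0, /*live_known=*/0, reload_reg_p);

  /* 2. Choose a hard register for each reload.  find_reloads may already
	have filled in rld[i].reg_rtx for some of them.  */
  for (int i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      rld[i].reg_rtx = choose_hard_reg_for (i);	/* hypothetical helper */

  /* 3. Substitute the chosen reload registers into the recorded
	locations.  */
  subst_reloads (insn);
}
#endif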
/* We do not enable this with CHECKING_P, since it is awfully slow.  */

#include "coretypes.h"
#include "rtl-error.h"
#include "addresses.h"

/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))
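
/* Illustrative use of CONST_POOL_OK_P (a sketch, not original code):
   roughly how find_reloads forces a constant operand into the constant
   pool when the constant cannot satisfy its constraint directly, assuming
   the usual force_const_mem/validize_mem helpers.  */
#if 0
  if (CONST_POOL_OK_P (mode, op))
    op = validize_mem (force_const_mem (mode, op));
#endif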
/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1
	  || (reg_class_size[(int) rclass] >= 1
	      && targetm.class_likely_spilled_p (rclass)));
}
/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
int n_reloads;
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
int n_earlyclobbers;
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;

/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */
/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in */
  int what;			/* which reload this is for */
  machine_mode mode;		/* mode it must have */
};

static struct replacement
  replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;
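
/* Sketch (not original code) of how the table above is consumed: when
   `replace_reloads' is nonzero, push_reload and push_replacement fill
   `replacements', and subst_reloads later stores each chosen reload
   register into the recorded location, roughly like this simplified loop
   (assuming rld[r->what].reg_rtx has already been chosen).  */
#if 0
  for (int i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reg = rld[r->what].reg_rtx;
      if (GET_MODE (reg) != r->mode)
	reg = gen_rtx_REG (r->mode, REGNO (reg));	/* adjust the mode */
      *r->where = reg;		/* overwrite the recorded location */
    }
#endif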
/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};
#ifdef SECONDARY_MEMORY_NEEDED

/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;
#endif
/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx_insn *this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from the reload-number for the
   input operand.  */
static int output_reloadnum;

/* Compare two RTX's.  */
#define MATCHES(x, y) \
  (x == y || (x != 0 && (REG_P (x)				\
			 ? REG_P (y) && REGNO (x) == REGNO (y)	\
			 : rtx_equal_p (x, y) && ! side_effects_p (x))))
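
/* Examples (not original code) of what MATCHES treats as equal: two REGs
   match whenever their register numbers agree, regardless of mode, while
   other rtxes must be rtx_equal_p and free of side effects, so an
   autoincrement MEM never matches and therefore never gets merged.  */
#if 0
  rtx r1 = gen_rtx_REG (SImode, 3);
  rtx r2 = gen_rtx_REG (DImode, 3);
  gcc_assert (MATCHES (r1, r2));			/* same hard reg */
  gcc_assert (!MATCHES (r1, gen_rtx_REG (SImode, 4)));	/* different reg */
#endif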
/* Indicates if two reload purposes are for similar enough things that we
   can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2)		\
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
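
/* Sketch (not original code) of how the two macros above cooperate when an
   existing reload I is reused for a new request, mirroring the pattern used
   by push_reload and push_secondary_reload below.  */
#if 0
  if (MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
    {
      /* Reuse reload I; demote it to RELOAD_OTHER if the purposes differ.  */
      if (MERGE_TO_OTHER (type, rld[i].when_needed, opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }
#endif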
/* If we are going to reload an address, compute the reload type to
   use.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
static int push_secondary_reload (int, rtx, int, int, enum reg_class,
				  machine_mode, enum reload_type,
				  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (machine_mode, machine_mode,
					int, unsigned int);
static void push_replacement (rtx *, int, machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
				 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
			      machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
				rtx_insn *, int *);
static rtx make_memloc (rtx, int);
static int maybe_memory_address_addr_space_p (machine_mode, rtx,
					       addr_space_t, rtx *);
static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
				 int, enum reload_type, int, rtx_insn *);
static rtx subst_reg_equivs (rtx, rtx_insn *);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx_insn *, int, int);
static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
				   enum rtx_code, enum rtx_code, rtx *,
				   int, enum reload_type, int, rtx_insn *);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
				       machine_mode, int,
				       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
					int, rtx_insn *, int *);
static void copy_replacements_1 (rtx *, rtx *, int);
static int find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
					 rtx, rtx *);
/* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list (regno)
    = alloc_EXPR_LIST (REG_EQUIV, mem,
		       reg_equiv_alt_mem_list (regno));
}
305 /* Determine if any secondary reloads are needed for loading (if IN_P is
306 nonzero) or storing (if IN_P is zero) X to or from a reload register of
307 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
308 are needed, push them.
310 Return the reload number of the secondary reload we made, or -1 if
311 we didn't need one. *PICODE is set to the insn_code to use if we do
312 need a secondary reload. */
315 push_secondary_reload (int in_p
, rtx x
, int opnum
, int optional
,
316 enum reg_class reload_class
,
317 machine_mode reload_mode
, enum reload_type type
,
318 enum insn_code
*picode
, secondary_reload_info
*prev_sri
)
320 enum reg_class rclass
= NO_REGS
;
321 enum reg_class scratch_class
;
322 machine_mode mode
= reload_mode
;
323 enum insn_code icode
= CODE_FOR_nothing
;
324 enum insn_code t_icode
= CODE_FOR_nothing
;
325 enum reload_type secondary_type
;
326 int s_reload
, t_reload
= -1;
327 const char *scratch_constraint
;
328 secondary_reload_info sri
;
330 if (type
== RELOAD_FOR_INPUT_ADDRESS
331 || type
== RELOAD_FOR_OUTPUT_ADDRESS
332 || type
== RELOAD_FOR_INPADDR_ADDRESS
333 || type
== RELOAD_FOR_OUTADDR_ADDRESS
)
334 secondary_type
= type
;
336 secondary_type
= in_p
? RELOAD_FOR_INPUT_ADDRESS
: RELOAD_FOR_OUTPUT_ADDRESS
;
338 *picode
= CODE_FOR_nothing
;
340 /* If X is a paradoxical SUBREG, use the inner value to determine both the
341 mode and object being reloaded. */
342 if (paradoxical_subreg_p (x
))
345 reload_mode
= GET_MODE (x
);
348 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
349 is still a pseudo-register by now, it *must* have an equivalent MEM
350 but we don't want to assume that), use that equivalent when seeing if
351 a secondary reload is needed since whether or not a reload is needed
352 might be sensitive to the form of the MEM. */
354 if (REG_P (x
) && REGNO (x
) >= FIRST_PSEUDO_REGISTER
355 && reg_equiv_mem (REGNO (x
)))
356 x
= reg_equiv_mem (REGNO (x
));
358 sri
.icode
= CODE_FOR_nothing
;
359 sri
.prev_sri
= prev_sri
;
360 rclass
= (enum reg_class
) targetm
.secondary_reload (in_p
, x
, reload_class
,
362 icode
= (enum insn_code
) sri
.icode
;
364 /* If we don't need any secondary registers, done. */
365 if (rclass
== NO_REGS
&& icode
== CODE_FOR_nothing
)
368 if (rclass
!= NO_REGS
)
369 t_reload
= push_secondary_reload (in_p
, x
, opnum
, optional
, rclass
,
370 reload_mode
, type
, &t_icode
, &sri
);
372 /* If we will be using an insn, the secondary reload is for a
375 if (icode
!= CODE_FOR_nothing
)
377 /* If IN_P is nonzero, the reload register will be the output in
378 operand 0. If IN_P is zero, the reload register will be the input
379 in operand 1. Outputs should have an initial "=", which we must
382 /* ??? It would be useful to be able to handle only two, or more than
383 three, operands, but for now we can only handle the case of having
384 exactly three: output, input and one temp/scratch. */
385 gcc_assert (insn_data
[(int) icode
].n_operands
== 3);
387 /* ??? We currently have no way to represent a reload that needs
388 an icode to reload from an intermediate tertiary reload register.
389 We should probably have a new field in struct reload to tag a
390 chain of scratch operand reloads onto. */
391 gcc_assert (rclass
== NO_REGS
);
393 scratch_constraint
= insn_data
[(int) icode
].operand
[2].constraint
;
394 gcc_assert (*scratch_constraint
== '=');
395 scratch_constraint
++;
396 if (*scratch_constraint
== '&')
397 scratch_constraint
++;
398 scratch_class
= (reg_class_for_constraint
399 (lookup_constraint (scratch_constraint
)));
401 rclass
= scratch_class
;
402 mode
= insn_data
[(int) icode
].operand
[2].mode
;
405 /* This case isn't valid, so fail. Reload is allowed to use the same
406 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
407 in the case of a secondary register, we actually need two different
408 registers for correct code. We fail here to prevent the possibility of
409 silently generating incorrect code later.
411 The convention is that secondary input reloads are valid only if the
412 secondary_class is different from class. If you have such a case, you
413 can not use secondary reloads, you must work around the problem some
416 Allow this when a reload_in/out pattern is being used. I.e. assume
417 that the generated code handles this case. */
419 gcc_assert (!in_p
|| rclass
!= reload_class
|| icode
!= CODE_FOR_nothing
420 || t_icode
!= CODE_FOR_nothing
);
422 /* See if we can reuse an existing secondary reload. */
423 for (s_reload
= 0; s_reload
< n_reloads
; s_reload
++)
424 if (rld
[s_reload
].secondary_p
425 && (reg_class_subset_p (rclass
, rld
[s_reload
].rclass
)
426 || reg_class_subset_p (rld
[s_reload
].rclass
, rclass
))
427 && ((in_p
&& rld
[s_reload
].inmode
== mode
)
428 || (! in_p
&& rld
[s_reload
].outmode
== mode
))
429 && ((in_p
&& rld
[s_reload
].secondary_in_reload
== t_reload
)
430 || (! in_p
&& rld
[s_reload
].secondary_out_reload
== t_reload
))
431 && ((in_p
&& rld
[s_reload
].secondary_in_icode
== t_icode
)
432 || (! in_p
&& rld
[s_reload
].secondary_out_icode
== t_icode
))
433 && (small_register_class_p (rclass
)
434 || targetm
.small_register_classes_for_mode_p (VOIDmode
))
435 && MERGABLE_RELOADS (secondary_type
, rld
[s_reload
].when_needed
,
436 opnum
, rld
[s_reload
].opnum
))
439 rld
[s_reload
].inmode
= mode
;
441 rld
[s_reload
].outmode
= mode
;
443 if (reg_class_subset_p (rclass
, rld
[s_reload
].rclass
))
444 rld
[s_reload
].rclass
= rclass
;
446 rld
[s_reload
].opnum
= MIN (rld
[s_reload
].opnum
, opnum
);
447 rld
[s_reload
].optional
&= optional
;
448 rld
[s_reload
].secondary_p
= 1;
449 if (MERGE_TO_OTHER (secondary_type
, rld
[s_reload
].when_needed
,
450 opnum
, rld
[s_reload
].opnum
))
451 rld
[s_reload
].when_needed
= RELOAD_OTHER
;
456 if (s_reload
== n_reloads
)
458 #ifdef SECONDARY_MEMORY_NEEDED
459 /* If we need a memory location to copy between the two reload regs,
460 set it up now. Note that we do the input case before making
461 the reload and the output case after. This is due to the
462 way reloads are output. */
464 if (in_p
&& icode
== CODE_FOR_nothing
465 && SECONDARY_MEMORY_NEEDED (rclass
, reload_class
, mode
))
467 get_secondary_mem (x
, reload_mode
, opnum
, type
);
469 /* We may have just added new reloads. Make sure we add
470 the new reload at the end. */
471 s_reload
= n_reloads
;
475 /* We need to make a new secondary reload for this register class. */
476 rld
[s_reload
].in
= rld
[s_reload
].out
= 0;
477 rld
[s_reload
].rclass
= rclass
;
479 rld
[s_reload
].inmode
= in_p
? mode
: VOIDmode
;
480 rld
[s_reload
].outmode
= ! in_p
? mode
: VOIDmode
;
481 rld
[s_reload
].reg_rtx
= 0;
482 rld
[s_reload
].optional
= optional
;
483 rld
[s_reload
].inc
= 0;
484 /* Maybe we could combine these, but it seems too tricky. */
485 rld
[s_reload
].nocombine
= 1;
486 rld
[s_reload
].in_reg
= 0;
487 rld
[s_reload
].out_reg
= 0;
488 rld
[s_reload
].opnum
= opnum
;
489 rld
[s_reload
].when_needed
= secondary_type
;
490 rld
[s_reload
].secondary_in_reload
= in_p
? t_reload
: -1;
491 rld
[s_reload
].secondary_out_reload
= ! in_p
? t_reload
: -1;
492 rld
[s_reload
].secondary_in_icode
= in_p
? t_icode
: CODE_FOR_nothing
;
493 rld
[s_reload
].secondary_out_icode
494 = ! in_p
? t_icode
: CODE_FOR_nothing
;
495 rld
[s_reload
].secondary_p
= 1;
499 #ifdef SECONDARY_MEMORY_NEEDED
500 if (! in_p
&& icode
== CODE_FOR_nothing
501 && SECONDARY_MEMORY_NEEDED (reload_class
, rclass
, mode
))
502 get_secondary_mem (x
, mode
, opnum
, type
);
510 /* If a secondary reload is needed, return its class. If both an intermediate
511 register and a scratch register is needed, we return the class of the
512 intermediate register. */
514 secondary_reload_class (bool in_p
, reg_class_t rclass
, machine_mode mode
,
517 enum insn_code icode
;
518 secondary_reload_info sri
;
520 sri
.icode
= CODE_FOR_nothing
;
523 = (enum reg_class
) targetm
.secondary_reload (in_p
, x
, rclass
, mode
, &sri
);
524 icode
= (enum insn_code
) sri
.icode
;
526 /* If there are no secondary reloads at all, we return NO_REGS.
527 If an intermediate register is needed, we return its class. */
528 if (icode
== CODE_FOR_nothing
|| rclass
!= NO_REGS
)
531 /* No intermediate register is needed, but we have a special reload
532 pattern, which we assume for now needs a scratch register. */
533 return scratch_reload_class (icode
);
536 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
537 three operands, verify that operand 2 is an output operand, and return
539 ??? We'd like to be able to handle any pattern with at least 2 operands,
540 for zero or more scratch registers, but that needs more infrastructure. */
542 scratch_reload_class (enum insn_code icode
)
544 const char *scratch_constraint
;
545 enum reg_class rclass
;
547 gcc_assert (insn_data
[(int) icode
].n_operands
== 3);
548 scratch_constraint
= insn_data
[(int) icode
].operand
[2].constraint
;
549 gcc_assert (*scratch_constraint
== '=');
550 scratch_constraint
++;
551 if (*scratch_constraint
== '&')
552 scratch_constraint
++;
553 rclass
= reg_class_for_constraint (lookup_constraint (scratch_constraint
));
554 gcc_assert (rclass
!= NO_REGS
);
558 #ifdef SECONDARY_MEMORY_NEEDED
560 /* Return a memory location that will be used to copy X in mode MODE.
561 If we haven't already made a location for this mode in this insn,
562 call find_reloads_address on the location being returned. */
565 get_secondary_mem (rtx x ATTRIBUTE_UNUSED
, machine_mode mode
,
566 int opnum
, enum reload_type type
)
571 /* By default, if MODE is narrower than a word, widen it to a word.
572 This is required because most machines that require these memory
573 locations do not support short load and stores from all registers
574 (e.g., FP registers). */
576 #ifdef SECONDARY_MEMORY_NEEDED_MODE
577 mode
= SECONDARY_MEMORY_NEEDED_MODE (mode
);
579 if (GET_MODE_BITSIZE (mode
) < BITS_PER_WORD
&& INTEGRAL_MODE_P (mode
))
580 mode
= mode_for_size (BITS_PER_WORD
, GET_MODE_CLASS (mode
), 0);
583 /* If we already have made a MEM for this operand in MODE, return it. */
584 if (secondary_memlocs_elim
[(int) mode
][opnum
] != 0)
585 return secondary_memlocs_elim
[(int) mode
][opnum
];
587 /* If this is the first time we've tried to get a MEM for this mode,
588 allocate a new one. `something_changed' in reload will get set
589 by noticing that the frame size has changed. */
591 if (secondary_memlocs
[(int) mode
] == 0)
593 #ifdef SECONDARY_MEMORY_NEEDED_RTX
594 secondary_memlocs
[(int) mode
] = SECONDARY_MEMORY_NEEDED_RTX (mode
);
596 secondary_memlocs
[(int) mode
]
597 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
601 /* Get a version of the address doing any eliminations needed. If that
602 didn't give us a new MEM, make a new one if it isn't valid. */
604 loc
= eliminate_regs (secondary_memlocs
[(int) mode
], VOIDmode
, NULL_RTX
);
605 mem_valid
= strict_memory_address_addr_space_p (mode
, XEXP (loc
, 0),
606 MEM_ADDR_SPACE (loc
));
608 if (! mem_valid
&& loc
== secondary_memlocs
[(int) mode
])
609 loc
= copy_rtx (loc
);
611 /* The only time the call below will do anything is if the stack
612 offset is too large. In that case IND_LEVELS doesn't matter, so we
613 can just pass a zero. Adjust the type to be the address of the
614 corresponding object. If the address was valid, save the eliminated
615 address. If it wasn't valid, we need to make a reload each time, so
620 type
= (type
== RELOAD_FOR_INPUT
? RELOAD_FOR_INPUT_ADDRESS
621 : type
== RELOAD_FOR_OUTPUT
? RELOAD_FOR_OUTPUT_ADDRESS
624 find_reloads_address (mode
, &loc
, XEXP (loc
, 0), &XEXP (loc
, 0),
628 secondary_memlocs_elim
[(int) mode
][opnum
] = loc
;
629 if (secondary_memlocs_elim_used
<= (int)mode
)
630 secondary_memlocs_elim_used
= (int)mode
+ 1;
634 /* Clear any secondary memory locations we've made. */
637 clear_secondary_mem (void)
639 memset (secondary_memlocs
, 0, sizeof secondary_memlocs
);
641 #endif /* SECONDARY_MEMORY_NEEDED */
644 /* Find the largest class which has at least one register valid in
645 mode INNER, and which for every such register, that register number
646 plus N is also valid in OUTER (if in range) and is cheap to move
647 into REGNO. Such a class must exist. */
649 static enum reg_class
650 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED
,
651 machine_mode inner ATTRIBUTE_UNUSED
, int n
,
652 unsigned int dest_regno ATTRIBUTE_UNUSED
)
657 enum reg_class best_class
= NO_REGS
;
658 enum reg_class dest_class ATTRIBUTE_UNUSED
= REGNO_REG_CLASS (dest_regno
);
659 unsigned int best_size
= 0;
662 for (rclass
= 1; rclass
< N_REG_CLASSES
; rclass
++)
666 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
- n
&& ! bad
; regno
++)
667 if (TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regno
))
669 if (HARD_REGNO_MODE_OK (regno
, inner
))
672 if (TEST_HARD_REG_BIT (reg_class_contents
[rclass
], regno
+ n
)
673 && ! HARD_REGNO_MODE_OK (regno
+ n
, outer
))
680 cost
= register_move_cost (outer
, (enum reg_class
) rclass
, dest_class
);
682 if ((reg_class_size
[rclass
] > best_size
683 && (best_cost
< 0 || best_cost
>= cost
))
686 best_class
= (enum reg_class
) rclass
;
687 best_size
= reg_class_size
[rclass
];
688 best_cost
= register_move_cost (outer
, (enum reg_class
) rclass
,
693 gcc_assert (best_size
!= 0);
698 /* We are trying to reload a subreg of something that is not a register.
699 Find the largest class which contains only registers valid in
700 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
701 which we would eventually like to obtain the object. */
703 static enum reg_class
704 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED
,
705 machine_mode mode ATTRIBUTE_UNUSED
,
706 enum reg_class dest_class ATTRIBUTE_UNUSED
)
711 enum reg_class best_class
= NO_REGS
;
712 unsigned int best_size
= 0;
715 for (rclass
= 1; rclass
< N_REG_CLASSES
; rclass
++)
718 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
&& !bad
; regno
++)
720 if (in_hard_reg_set_p (reg_class_contents
[rclass
], mode
, regno
)
721 && !HARD_REGNO_MODE_OK (regno
, mode
))
728 cost
= register_move_cost (outer
, (enum reg_class
) rclass
, dest_class
);
730 if ((reg_class_size
[rclass
] > best_size
731 && (best_cost
< 0 || best_cost
>= cost
))
734 best_class
= (enum reg_class
) rclass
;
735 best_size
= reg_class_size
[rclass
];
736 best_cost
= register_move_cost (outer
, (enum reg_class
) rclass
,
741 gcc_assert (best_size
!= 0);
743 #ifdef LIMIT_RELOAD_CLASS
744 best_class
= LIMIT_RELOAD_CLASS (mode
, best_class
);
749 /* Return the number of a previously made reload that can be combined with
750 a new one, or n_reloads if none of the existing reloads can be used.
751 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
752 push_reload, they determine the kind of the new reload that we try to
753 combine. P_IN points to the corresponding value of IN, which can be
754 modified by this function.
755 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
758 find_reusable_reload (rtx
*p_in
, rtx out
, enum reg_class rclass
,
759 enum reload_type type
, int opnum
, int dont_share
)
763 /* We can't merge two reloads if the output of either one is
766 if (earlyclobber_operand_p (out
))
769 /* We can use an existing reload if the class is right
770 and at least one of IN and OUT is a match
771 and the other is at worst neutral.
772 (A zero compared against anything is neutral.)
774 For targets with small register classes, don't use existing reloads
775 unless they are for the same thing since that can cause us to need
776 more reload registers than we otherwise would. */
778 for (i
= 0; i
< n_reloads
; i
++)
779 if ((reg_class_subset_p (rclass
, rld
[i
].rclass
)
780 || reg_class_subset_p (rld
[i
].rclass
, rclass
))
781 /* If the existing reload has a register, it must fit our class. */
782 && (rld
[i
].reg_rtx
== 0
783 || TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
784 true_regnum (rld
[i
].reg_rtx
)))
785 && ((in
!= 0 && MATCHES (rld
[i
].in
, in
) && ! dont_share
786 && (out
== 0 || rld
[i
].out
== 0 || MATCHES (rld
[i
].out
, out
)))
787 || (out
!= 0 && MATCHES (rld
[i
].out
, out
)
788 && (in
== 0 || rld
[i
].in
== 0 || MATCHES (rld
[i
].in
, in
))))
789 && (rld
[i
].out
== 0 || ! earlyclobber_operand_p (rld
[i
].out
))
790 && (small_register_class_p (rclass
)
791 || targetm
.small_register_classes_for_mode_p (VOIDmode
))
792 && MERGABLE_RELOADS (type
, rld
[i
].when_needed
, opnum
, rld
[i
].opnum
))
795 /* Reloading a plain reg for input can match a reload to postincrement
796 that reg, since the postincrement's value is the right value.
797 Likewise, it can match a preincrement reload, since we regard
798 the preincrementation as happening before any ref in this insn
800 for (i
= 0; i
< n_reloads
; i
++)
801 if ((reg_class_subset_p (rclass
, rld
[i
].rclass
)
802 || reg_class_subset_p (rld
[i
].rclass
, rclass
))
803 /* If the existing reload has a register, it must fit our
805 && (rld
[i
].reg_rtx
== 0
806 || TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
807 true_regnum (rld
[i
].reg_rtx
)))
808 && out
== 0 && rld
[i
].out
== 0 && rld
[i
].in
!= 0
810 && GET_RTX_CLASS (GET_CODE (rld
[i
].in
)) == RTX_AUTOINC
811 && MATCHES (XEXP (rld
[i
].in
, 0), in
))
812 || (REG_P (rld
[i
].in
)
813 && GET_RTX_CLASS (GET_CODE (in
)) == RTX_AUTOINC
814 && MATCHES (XEXP (in
, 0), rld
[i
].in
)))
815 && (rld
[i
].out
== 0 || ! earlyclobber_operand_p (rld
[i
].out
))
816 && (small_register_class_p (rclass
)
817 || targetm
.small_register_classes_for_mode_p (VOIDmode
))
818 && MERGABLE_RELOADS (type
, rld
[i
].when_needed
,
819 opnum
, rld
[i
].opnum
))
821 /* Make sure reload_in ultimately has the increment,
822 not the plain register. */
830 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
831 expression. MODE is the mode that X will be used in. OUTPUT is true if
832 the function is invoked for the output part of an enclosing reload. */
835 reload_inner_reg_of_subreg (rtx x
, machine_mode mode
, bool output
)
839 /* Only SUBREGs are problematical. */
840 if (GET_CODE (x
) != SUBREG
)
843 inner
= SUBREG_REG (x
);
845 /* If INNER is a constant or PLUS, then INNER will need reloading. */
846 if (CONSTANT_P (inner
) || GET_CODE (inner
) == PLUS
)
849 /* If INNER is not a hard register, then INNER will not need reloading. */
850 if (!(REG_P (inner
) && HARD_REGISTER_P (inner
)))
853 /* If INNER is not ok for MODE, then INNER will need reloading. */
854 if (!HARD_REGNO_MODE_OK (subreg_regno (x
), mode
))
857 /* If this is for an output, and the outer part is a word or smaller,
858 INNER is larger than a word and the number of registers in INNER is
859 not the same as the number of words in INNER, then INNER will need
860 reloading (with an in-out reload). */
862 && GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
863 && GET_MODE_SIZE (GET_MODE (inner
)) > UNITS_PER_WORD
864 && ((GET_MODE_SIZE (GET_MODE (inner
)) / UNITS_PER_WORD
)
865 != (int) hard_regno_nregs
[REGNO (inner
)][GET_MODE (inner
)]));
868 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
869 requiring an extra reload register. The caller has already found that
870 IN contains some reference to REGNO, so check that we can produce the
871 new value in a single step. E.g. if we have
872 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
873 instruction that adds one to a register, this should succeed.
874 However, if we have something like
875 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
876 needs to be loaded into a register first, we need a separate reload
878 Such PLUS reloads are generated by find_reload_address_part.
879 The out-of-range PLUS expressions are usually introduced in the instruction
880 patterns by register elimination and substituting pseudos without a home
881 by their function-invariant equivalences. */
883 can_reload_into (rtx in
, int regno
, machine_mode mode
)
888 struct recog_data_d save_recog_data
;
890 /* For matching constraints, we often get notional input reloads where
891 we want to use the original register as the reload register. I.e.
892 technically this is a non-optional input-output reload, but IN is
893 already a valid register, and has been chosen as the reload register.
894 Speed this up, since it trivially works. */
898 /* To test MEMs properly, we'd have to take into account all the reloads
899 that are already scheduled, which can become quite complicated.
900 And since we've already handled address reloads for this MEM, it
901 should always succeed anyway. */
905 /* If we can make a simple SET insn that does the job, everything should
907 dst
= gen_rtx_REG (mode
, regno
);
908 test_insn
= make_insn_raw (gen_rtx_SET (dst
, in
));
909 save_recog_data
= recog_data
;
910 if (recog_memoized (test_insn
) >= 0)
912 extract_insn (test_insn
);
913 r
= constrain_operands (1, get_enabled_alternatives (test_insn
));
915 recog_data
= save_recog_data
;
919 /* Record one reload that needs to be performed.
920 IN is an rtx saying where the data are to be found before this instruction.
921 OUT says where they must be stored after the instruction.
922 (IN is zero for data not read, and OUT is zero for data not written.)
923 INLOC and OUTLOC point to the places in the instructions where
924 IN and OUT were found.
925 If IN and OUT are both nonzero, it means the same register must be used
926 to reload both IN and OUT.
928 RCLASS is a register class required for the reloaded data.
929 INMODE is the machine mode that the instruction requires
930 for the reg that replaces IN and OUTMODE is likewise for OUT.
932 If IN is zero, then OUT's location and mode should be passed as INLOC and INMODE.
935 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
937 OPTIONAL nonzero means this reload does not need to be performed:
938 it can be discarded if that is more convenient.
940 OPNUM and TYPE say what the purpose of this reload is.
942 The return value is the reload-number for this reload.
944 If both IN and OUT are nonzero, in some rare cases we might
945 want to make two separate reloads. (Actually we never do this now.)
946 Therefore, the reload-number for OUT is stored in
947 output_reloadnum when we return; the return value applies to IN.
948 Usually (presently always), when IN and OUT are nonzero,
949 the two reload-numbers are equal, but the caller should be careful to distinguish them.  */
953 push_reload (rtx in
, rtx out
, rtx
*inloc
, rtx
*outloc
,
954 enum reg_class rclass
, machine_mode inmode
,
955 machine_mode outmode
, int strict_low
, int optional
,
956 int opnum
, enum reload_type type
)
960 int dont_remove_subreg
= 0;
961 #ifdef LIMIT_RELOAD_CLASS
962 rtx
*in_subreg_loc
= 0, *out_subreg_loc
= 0;
964 int secondary_in_reload
= -1, secondary_out_reload
= -1;
965 enum insn_code secondary_in_icode
= CODE_FOR_nothing
;
966 enum insn_code secondary_out_icode
= CODE_FOR_nothing
;
967 enum reg_class subreg_in_class ATTRIBUTE_UNUSED
;
968 subreg_in_class
= NO_REGS
;
970 /* INMODE and/or OUTMODE could be VOIDmode if no mode
971 has been specified for the operand. In that case,
972 use the operand's mode as the mode to reload. */
973 if (inmode
== VOIDmode
&& in
!= 0)
974 inmode
= GET_MODE (in
);
975 if (outmode
== VOIDmode
&& out
!= 0)
976 outmode
= GET_MODE (out
);
978 /* If find_reloads and friends until now missed to replace a pseudo
979 with a constant of reg_equiv_constant something went wrong
981 Note that it can't simply be done here if we missed it earlier
982 since the constant might need to be pushed into the literal pool
983 and the resulting memref would probably need further
985 if (in
!= 0 && REG_P (in
))
987 int regno
= REGNO (in
);
989 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
990 || reg_renumber
[regno
] >= 0
991 || reg_equiv_constant (regno
) == NULL_RTX
);
994 /* reg_equiv_constant only contains constants which are obviously
995 not appropriate as destination. So if we would need to replace
996 the destination pseudo with a constant we are in real
998 if (out
!= 0 && REG_P (out
))
1000 int regno
= REGNO (out
);
1002 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
1003 || reg_renumber
[regno
] >= 0
1004 || reg_equiv_constant (regno
) == NULL_RTX
);
1007 /* If we have a read-write operand with an address side-effect,
1008 change either IN or OUT so the side-effect happens only once. */
1009 if (in
!= 0 && out
!= 0 && MEM_P (in
) && rtx_equal_p (in
, out
))
1010 switch (GET_CODE (XEXP (in
, 0)))
1012 case POST_INC
: case POST_DEC
: case POST_MODIFY
:
1013 in
= replace_equiv_address_nv (in
, XEXP (XEXP (in
, 0), 0));
1016 case PRE_INC
: case PRE_DEC
: case PRE_MODIFY
:
1017 out
= replace_equiv_address_nv (out
, XEXP (XEXP (out
, 0), 0));
1024 /* If we are reloading a (SUBREG constant ...), really reload just the
1025 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1026 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1027 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1028 register is a pseudo, also reload the inside expression.
1029 For machines that extend byte loads, do this for any SUBREG of a pseudo
1030 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1031 M2 is an integral mode that gets extended when loaded.
1032 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1033 where either M1 is not valid for R or M2 is wider than a word but we
1034 only need one register to store an M2-sized quantity in R.
1035 (However, if OUT is nonzero, we need to reload the reg *and*
1036 the subreg, so do nothing here, and let following statement handle it.)
1038 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1039 we can't handle it here because CONST_INT does not indicate a mode.
1041 Similarly, we must reload the inside expression if we have a
1042 STRICT_LOW_PART (presumably, in == out in this case).
1044 Also reload the inner expression if it does not require a secondary
1045 reload but the SUBREG does.
1047 Finally, reload the inner expression if it is a register that is in
1048 the class whose registers cannot be referenced in a different size
1049 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1050 cannot reload just the inside since we might end up with the wrong
1051 register class. But if it is inside a STRICT_LOW_PART, we have
1052 no choice, so we hope we do get the right register class there. */
1054 if (in
!= 0 && GET_CODE (in
) == SUBREG
1055 && (subreg_lowpart_p (in
) || strict_low
)
1056 #ifdef CANNOT_CHANGE_MODE_CLASS
1057 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in
)), inmode
, rclass
)
1059 && contains_reg_of_mode
[(int) rclass
][(int) GET_MODE (SUBREG_REG (in
))]
1060 && (CONSTANT_P (SUBREG_REG (in
))
1061 || GET_CODE (SUBREG_REG (in
)) == PLUS
1063 || (((REG_P (SUBREG_REG (in
))
1064 && REGNO (SUBREG_REG (in
)) >= FIRST_PSEUDO_REGISTER
)
1065 || MEM_P (SUBREG_REG (in
)))
1066 && ((GET_MODE_PRECISION (inmode
)
1067 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in
))))
1068 #ifdef LOAD_EXTEND_OP
1069 || (GET_MODE_SIZE (inmode
) <= UNITS_PER_WORD
1070 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1072 && (GET_MODE_PRECISION (inmode
)
1073 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in
))))
1074 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in
)))
1075 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in
))) != UNKNOWN
)
1077 #if WORD_REGISTER_OPERATIONS
1078 || ((GET_MODE_PRECISION (inmode
)
1079 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in
))))
1080 && ((GET_MODE_SIZE (inmode
) - 1) / UNITS_PER_WORD
==
1081 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))) - 1)
1085 || (REG_P (SUBREG_REG (in
))
1086 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
1087 /* The case where out is nonzero
1088 is handled differently in the following statement. */
1089 && (out
== 0 || subreg_lowpart_p (in
))
1090 && ((GET_MODE_SIZE (inmode
) <= UNITS_PER_WORD
1091 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1093 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1095 != (int) hard_regno_nregs
[REGNO (SUBREG_REG (in
))]
1096 [GET_MODE (SUBREG_REG (in
))]))
1097 || ! HARD_REGNO_MODE_OK (subreg_regno (in
), inmode
)))
1098 || (secondary_reload_class (1, rclass
, inmode
, in
) != NO_REGS
1099 && (secondary_reload_class (1, rclass
, GET_MODE (SUBREG_REG (in
)),
1102 #ifdef CANNOT_CHANGE_MODE_CLASS
1103 || (REG_P (SUBREG_REG (in
))
1104 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
1105 && REG_CANNOT_CHANGE_MODE_P
1106 (REGNO (SUBREG_REG (in
)), GET_MODE (SUBREG_REG (in
)), inmode
))
1110 #ifdef LIMIT_RELOAD_CLASS
1111 in_subreg_loc
= inloc
;
1113 inloc
= &SUBREG_REG (in
);
1115 #if ! defined (LOAD_EXTEND_OP)
1116 if (!WORD_REGISTER_OPERATIONS
1118 /* This is supposed to happen only for paradoxical subregs made by
1119 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1120 gcc_assert (GET_MODE_SIZE (GET_MODE (in
)) <= GET_MODE_SIZE (inmode
));
1122 inmode
= GET_MODE (in
);
1125 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1126 where M1 is not valid for R if it was not handled by the code above.
1128 Similar issue for (SUBREG constant ...) if it was not handled by the
1129 code above. This can happen if SUBREG_BYTE != 0.
1131 However, we must reload the inner reg *as well as* the subreg in
1134 if (in
!= 0 && reload_inner_reg_of_subreg (in
, inmode
, false))
1136 if (REG_P (SUBREG_REG (in
)))
1138 = find_valid_class (inmode
, GET_MODE (SUBREG_REG (in
)),
1139 subreg_regno_offset (REGNO (SUBREG_REG (in
)),
1140 GET_MODE (SUBREG_REG (in
)),
1143 REGNO (SUBREG_REG (in
)));
1144 else if (GET_CODE (SUBREG_REG (in
)) == SYMBOL_REF
)
1145 subreg_in_class
= find_valid_class_1 (inmode
,
1146 GET_MODE (SUBREG_REG (in
)),
1149 /* This relies on the fact that emit_reload_insns outputs the
1150 instructions for input reloads of type RELOAD_OTHER in the same
1151 order as the reloads. Thus if the outer reload is also of type
1152 RELOAD_OTHER, we are guaranteed that this inner reload will be
1153 output before the outer reload. */
1154 push_reload (SUBREG_REG (in
), NULL_RTX
, &SUBREG_REG (in
), (rtx
*) 0,
1155 subreg_in_class
, VOIDmode
, VOIDmode
, 0, 0, opnum
, type
);
1156 dont_remove_subreg
= 1;
1159 /* Similarly for paradoxical and problematical SUBREGs on the output.
1160 Note that there is no reason we need worry about the previous value
1161 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1162 entitled to clobber it all (except in the case of a word mode subreg
1163 or of a STRICT_LOW_PART, in that latter case the constraint should
1164 label it input-output.) */
1165 if (out
!= 0 && GET_CODE (out
) == SUBREG
1166 && (subreg_lowpart_p (out
) || strict_low
)
1167 #ifdef CANNOT_CHANGE_MODE_CLASS
1168 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out
)), outmode
, rclass
)
1170 && contains_reg_of_mode
[(int) rclass
][(int) GET_MODE (SUBREG_REG (out
))]
1171 && (CONSTANT_P (SUBREG_REG (out
))
1173 || (((REG_P (SUBREG_REG (out
))
1174 && REGNO (SUBREG_REG (out
)) >= FIRST_PSEUDO_REGISTER
)
1175 || MEM_P (SUBREG_REG (out
)))
1176 && ((GET_MODE_PRECISION (outmode
)
1177 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out
))))
1178 #if WORD_REGISTER_OPERATIONS
1179 || ((GET_MODE_PRECISION (outmode
)
1180 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out
))))
1181 && ((GET_MODE_SIZE (outmode
) - 1) / UNITS_PER_WORD
==
1182 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
))) - 1)
1186 || (REG_P (SUBREG_REG (out
))
1187 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
1188 /* The case of a word mode subreg
1189 is handled differently in the following statement. */
1190 && ! (GET_MODE_SIZE (outmode
) <= UNITS_PER_WORD
1191 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
)))
1193 && ! HARD_REGNO_MODE_OK (subreg_regno (out
), outmode
))
1194 || (secondary_reload_class (0, rclass
, outmode
, out
) != NO_REGS
1195 && (secondary_reload_class (0, rclass
, GET_MODE (SUBREG_REG (out
)),
1198 #ifdef CANNOT_CHANGE_MODE_CLASS
1199 || (REG_P (SUBREG_REG (out
))
1200 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
1201 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out
)),
1202 GET_MODE (SUBREG_REG (out
)),
1207 #ifdef LIMIT_RELOAD_CLASS
1208 out_subreg_loc
= outloc
;
1210 outloc
= &SUBREG_REG (out
);
1212 gcc_assert (WORD_REGISTER_OPERATIONS
|| !MEM_P (out
)
1213 || GET_MODE_SIZE (GET_MODE (out
))
1214 <= GET_MODE_SIZE (outmode
));
1215 outmode
= GET_MODE (out
);
1218 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1219 where either M1 is not valid for R or M2 is wider than a word but we
1220 only need one register to store an M2-sized quantity in R.
1222 However, we must reload the inner reg *as well as* the subreg in
1223 that case and the inner reg is an in-out reload. */
1225 if (out
!= 0 && reload_inner_reg_of_subreg (out
, outmode
, true))
1227 enum reg_class in_out_class
1228 = find_valid_class (outmode
, GET_MODE (SUBREG_REG (out
)),
1229 subreg_regno_offset (REGNO (SUBREG_REG (out
)),
1230 GET_MODE (SUBREG_REG (out
)),
1233 REGNO (SUBREG_REG (out
)));
1235 /* This relies on the fact that emit_reload_insns outputs the
1236 instructions for output reloads of type RELOAD_OTHER in reverse
1237 order of the reloads. Thus if the outer reload is also of type
1238 RELOAD_OTHER, we are guaranteed that this inner reload will be
1239 output after the outer reload. */
1240 push_reload (SUBREG_REG (out
), SUBREG_REG (out
), &SUBREG_REG (out
),
1241 &SUBREG_REG (out
), in_out_class
, VOIDmode
, VOIDmode
,
1242 0, 0, opnum
, RELOAD_OTHER
);
1243 dont_remove_subreg
= 1;
1246 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1247 if (in
!= 0 && out
!= 0 && MEM_P (out
)
1248 && (REG_P (in
) || MEM_P (in
) || GET_CODE (in
) == PLUS
)
1249 && reg_overlap_mentioned_for_reload_p (in
, XEXP (out
, 0)))
1252 /* If IN is a SUBREG of a hard register, make a new REG. This
1253 simplifies some of the cases below. */
1255 if (in
!= 0 && GET_CODE (in
) == SUBREG
&& REG_P (SUBREG_REG (in
))
1256 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
1257 && ! dont_remove_subreg
)
1258 in
= gen_rtx_REG (GET_MODE (in
), subreg_regno (in
));
1260 /* Similarly for OUT. */
1261 if (out
!= 0 && GET_CODE (out
) == SUBREG
1262 && REG_P (SUBREG_REG (out
))
1263 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
1264 && ! dont_remove_subreg
)
1265 out
= gen_rtx_REG (GET_MODE (out
), subreg_regno (out
));
1267 /* Narrow down the class of register wanted if that is
1268 desirable on this machine for efficiency. */
1270 reg_class_t preferred_class
= rclass
;
1273 preferred_class
= targetm
.preferred_reload_class (in
, rclass
);
1275 /* Output reloads may need analogous treatment, different in detail. */
1278 = targetm
.preferred_output_reload_class (out
, preferred_class
);
1280 /* Discard what the target said if we cannot do it. */
1281 if (preferred_class
!= NO_REGS
1282 || (optional
&& type
== RELOAD_FOR_OUTPUT
))
1283 rclass
= (enum reg_class
) preferred_class
;
1286 /* Make sure we use a class that can handle the actual pseudo
1287 inside any subreg. For example, on the 386, QImode regs
1288 can appear within SImode subregs. Although GENERAL_REGS
1289 can handle SImode, QImode needs a smaller class. */
1290 #ifdef LIMIT_RELOAD_CLASS
1292 rclass
= LIMIT_RELOAD_CLASS (inmode
, rclass
);
1293 else if (in
!= 0 && GET_CODE (in
) == SUBREG
)
1294 rclass
= LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in
)), rclass
);
1297 rclass
= LIMIT_RELOAD_CLASS (outmode
, rclass
);
1298 if (out
!= 0 && GET_CODE (out
) == SUBREG
)
1299 rclass
= LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out
)), rclass
);
1302 /* Verify that this class is at least possible for the mode that
1304 if (this_insn_is_asm
)
1307 if (GET_MODE_SIZE (inmode
) > GET_MODE_SIZE (outmode
))
1311 if (mode
== VOIDmode
)
1313 error_for_asm (this_insn
, "cannot reload integer constant "
1314 "operand in %<asm%>");
1319 outmode
= word_mode
;
1321 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1322 if (HARD_REGNO_MODE_OK (i
, mode
)
1323 && in_hard_reg_set_p (reg_class_contents
[(int) rclass
], mode
, i
))
1325 if (i
== FIRST_PSEUDO_REGISTER
)
1327 error_for_asm (this_insn
, "impossible register constraint "
1329 /* Avoid further trouble with this insn. */
1330 PATTERN (this_insn
) = gen_rtx_USE (VOIDmode
, const0_rtx
);
1331 /* We used to continue here setting class to ALL_REGS, but it triggers
1332 sanity check on i386 for:
1333 void foo(long double d)
1337 Returning zero here ought to be safe as we take care in
1338 find_reloads to not process the reloads when instruction was
1345 /* Optional output reloads are always OK even if we have no register class,
1346 since the function of these reloads is only to have spill_reg_store etc.
1347 set, so that the storing insn can be deleted later. */
1348 gcc_assert (rclass
!= NO_REGS
1349 || (optional
!= 0 && type
== RELOAD_FOR_OUTPUT
));
1351 i
= find_reusable_reload (&in
, out
, rclass
, type
, opnum
, dont_share
);
1355 /* See if we need a secondary reload register to move between CLASS
1356 and IN or CLASS and OUT. Get the icode and push any required reloads
1357 needed for each of them if so. */
1361 = push_secondary_reload (1, in
, opnum
, optional
, rclass
, inmode
, type
,
1362 &secondary_in_icode
, NULL
);
1363 if (out
!= 0 && GET_CODE (out
) != SCRATCH
)
1364 secondary_out_reload
1365 = push_secondary_reload (0, out
, opnum
, optional
, rclass
, outmode
,
1366 type
, &secondary_out_icode
, NULL
);
1368 /* We found no existing reload suitable for re-use.
1369 So add an additional reload. */
1371 #ifdef SECONDARY_MEMORY_NEEDED
1372 if (subreg_in_class
== NO_REGS
1375 || (GET_CODE (in
) == SUBREG
&& REG_P (SUBREG_REG (in
))))
1376 && reg_or_subregno (in
) < FIRST_PSEUDO_REGISTER
)
1377 subreg_in_class
= REGNO_REG_CLASS (reg_or_subregno (in
));
1378 /* If a memory location is needed for the copy, make one. */
1379 if (subreg_in_class
!= NO_REGS
1380 && SECONDARY_MEMORY_NEEDED (subreg_in_class
, rclass
, inmode
))
1381 get_secondary_mem (in
, inmode
, opnum
, type
);
1387 rld
[i
].rclass
= rclass
;
1388 rld
[i
].inmode
= inmode
;
1389 rld
[i
].outmode
= outmode
;
1391 rld
[i
].optional
= optional
;
1393 rld
[i
].nocombine
= 0;
1394 rld
[i
].in_reg
= inloc
? *inloc
: 0;
1395 rld
[i
].out_reg
= outloc
? *outloc
: 0;
1396 rld
[i
].opnum
= opnum
;
1397 rld
[i
].when_needed
= type
;
1398 rld
[i
].secondary_in_reload
= secondary_in_reload
;
1399 rld
[i
].secondary_out_reload
= secondary_out_reload
;
1400 rld
[i
].secondary_in_icode
= secondary_in_icode
;
1401 rld
[i
].secondary_out_icode
= secondary_out_icode
;
1402 rld
[i
].secondary_p
= 0;
1406 #ifdef SECONDARY_MEMORY_NEEDED
1409 || (GET_CODE (out
) == SUBREG
&& REG_P (SUBREG_REG (out
))))
1410 && reg_or_subregno (out
) < FIRST_PSEUDO_REGISTER
1411 && SECONDARY_MEMORY_NEEDED (rclass
,
1412 REGNO_REG_CLASS (reg_or_subregno (out
)),
1414 get_secondary_mem (out
, outmode
, opnum
, type
);
1419 /* We are reusing an existing reload,
1420 but we may have additional information for it.
1421 For example, we may now have both IN and OUT
1422 while the old one may have just one of them. */
1424 /* The modes can be different. If they are, we want to reload in
1425 the larger mode, so that the value is valid for both modes. */
1426 if (inmode
!= VOIDmode
1427 && GET_MODE_SIZE (inmode
) > GET_MODE_SIZE (rld
[i
].inmode
))
1428 rld
[i
].inmode
= inmode
;
1429 if (outmode
!= VOIDmode
1430 && GET_MODE_SIZE (outmode
) > GET_MODE_SIZE (rld
[i
].outmode
))
1431 rld
[i
].outmode
= outmode
;
1434 rtx in_reg
= inloc
? *inloc
: 0;
1435 /* If we merge reloads for two distinct rtl expressions that
1436 are identical in content, there might be duplicate address
1437 reloads. Remove the extra set now, so that if we later find
1438 that we can inherit this reload, we can get rid of the
1439 address reloads altogether.
1441 Do not do this if both reloads are optional since the result
1442 would be an optional reload which could potentially leave
1443 unresolved address replacements.
1445 It is not sufficient to call transfer_replacements since
1446 choose_reload_regs will remove the replacements for address
1447 reloads of inherited reloads which results in the same
1449 if (rld
[i
].in
!= in
&& rtx_equal_p (in
, rld
[i
].in
)
1450 && ! (rld
[i
].optional
&& optional
))
1452 /* We must keep the address reload with the lower operand
1454 if (opnum
> rld
[i
].opnum
)
1456 remove_address_replacements (in
);
1458 in_reg
= rld
[i
].in_reg
;
1461 remove_address_replacements (rld
[i
].in
);
1463 /* When emitting reloads we don't necessarily look at the in-
1464 and outmode, but also directly at the operands (in and out).
1465 So we can't simply overwrite them with whatever we have found
1466 for this (to-be-merged) reload, we have to "merge" that too.
1467 Reusing another reload already verified that we deal with the
1468 same operands, just possibly in different modes. So we
1469 overwrite the operands only when the new mode is larger.
1470 See also PR33613. */
1472 || GET_MODE_SIZE (GET_MODE (in
))
1473 > GET_MODE_SIZE (GET_MODE (rld
[i
].in
)))
1477 && GET_MODE_SIZE (GET_MODE (in_reg
))
1478 > GET_MODE_SIZE (GET_MODE (rld
[i
].in_reg
))))
1479 rld
[i
].in_reg
= in_reg
;
1485 && GET_MODE_SIZE (GET_MODE (out
))
1486 > GET_MODE_SIZE (GET_MODE (rld
[i
].out
))))
1490 || GET_MODE_SIZE (GET_MODE (*outloc
))
1491 > GET_MODE_SIZE (GET_MODE (rld
[i
].out_reg
))))
1492 rld
[i
].out_reg
= *outloc
;
1494 if (reg_class_subset_p (rclass
, rld
[i
].rclass
))
1495 rld
[i
].rclass
= rclass
;
1496 rld
[i
].optional
&= optional
;
1497 if (MERGE_TO_OTHER (type
, rld
[i
].when_needed
,
1498 opnum
, rld
[i
].opnum
))
1499 rld
[i
].when_needed
= RELOAD_OTHER
;
1500 rld
[i
].opnum
= MIN (rld
[i
].opnum
, opnum
);
1503 /* If the ostensible rtx being reloaded differs from the rtx found
1504 in the location to substitute, this reload is not safe to combine
1505 because we cannot reliably tell whether it appears in the insn. */
1507 if (in
!= 0 && in
!= *inloc
)
1508 rld
[i
].nocombine
= 1;
1511 /* This was replaced by changes in find_reloads_address_1 and the new
1512 function inc_for_reload, which go with a new meaning of reload_inc. */
1514 /* If this is an IN/OUT reload in an insn that sets the CC,
1515 it must be for an autoincrement. It doesn't work to store
1516 the incremented value after the insn because that would clobber the CC.
1517 So we must do the increment of the value reloaded from,
1518 increment it, store it back, then decrement again. */
1519 if (out
!= 0 && sets_cc0_p (PATTERN (this_insn
)))
1523 rld
[i
].inc
= find_inc_amount (PATTERN (this_insn
), in
);
1524 /* If we did not find a nonzero amount-to-increment-by,
1525 that contradicts the belief that IN is being incremented
1526 in an address in this insn. */
1527 gcc_assert (rld
[i
].inc
!= 0);
1531 /* If we will replace IN and OUT with the reload-reg,
1532 record where they are located so that substitution need
1533 not do a tree walk. */
1535 if (replace_reloads
)
1539 struct replacement
*r
= &replacements
[n_replacements
++];
1544 if (outloc
!= 0 && outloc
!= inloc
)
1546 struct replacement
*r
= &replacements
[n_replacements
++];
1553 /* If this reload is just being introduced and it has both
1554 an incoming quantity and an outgoing quantity that are
1555 supposed to be made to match, see if either one of the two
1556 can serve as the place to reload into.
1558 If one of them is acceptable, set rld[i].reg_rtx
1561 if (in
!= 0 && out
!= 0 && in
!= out
&& rld
[i
].reg_rtx
== 0)
1563 rld
[i
].reg_rtx
= find_dummy_reload (in
, out
, inloc
, outloc
,
1566 earlyclobber_operand_p (out
));
1568 /* If the outgoing register already contains the same value
1569 as the incoming one, we can dispense with loading it.
1570 The easiest way to tell the caller that is to give a phony
1571 value for the incoming operand (same as outgoing one). */
1572 if (rld
[i
].reg_rtx
== out
1573 && (REG_P (in
) || CONSTANT_P (in
))
1574 && 0 != find_equiv_reg (in
, this_insn
, NO_REGS
, REGNO (out
),
1575 static_reload_reg_p
, i
, inmode
))
  /* If this is an input reload and the operand contains a register that
     dies in this insn and is used nowhere else, see if it is the right class
     to be used for this reload.  Use it if so.  (This occurs most commonly
     in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
     this if it is also an output reload that mentions the register unless
     the output is a SUBREG that clobbers an entire register.

     Note that the operand might be one of the spill regs, if it is a
     pseudo reg and we are in a block where spilling has not taken place.
     But if there is no spilling in this block, that is OK.
     An explicitly used hard reg cannot be a spill reg.  */

  if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
    {
      rtx note;
      int regno;
      machine_mode rel_mode = inmode;

      if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
	rel_mode = outmode;

      for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD
	    && REG_P (XEXP (note, 0))
	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	    && reg_mentioned_p (XEXP (note, 0), in)
	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    ORIGINAL_REGNO (XEXP (note, 0)))
		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
	    && ! refers_to_regno_for_reload_p (regno,
					       end_hard_regno (rel_mode, regno),
					       PATTERN (this_insn), inloc)
	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
	    /* If this is also an output reload, IN cannot be used as
	       the reload register if it is set in this insn unless IN
	       is also OUT.  */
	    && (out == 0 || in == out
		|| ! hard_reg_set_here_p (regno,
					  end_hard_regno (rel_mode, regno),
					  PATTERN (this_insn)))
	    /* ??? Why is this code so different from the previous?
	       Is there any simple coherent way to describe the two together?
	       What's going on here?  */
	    && (in != out
		|| (GET_CODE (in) == SUBREG
		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
			 / UNITS_PER_WORD)
			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	    /* Make sure the operand fits in the reg that dies.  */
	    && (GET_MODE_SIZE (rel_mode)
		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
	    && HARD_REGNO_MODE_OK (regno, inmode)
	    && HARD_REGNO_MODE_OK (regno, outmode))
	  {
	    unsigned int offs;
	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
				      hard_regno_nregs[regno][outmode]);

	    for (offs = 0; offs < nregs; offs++)
	      if (fixed_regs[regno + offs]
		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
					  regno + offs))
		break;

	    if (offs == nregs
		&& (! (refers_to_regno_for_reload_p
		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
		    || can_reload_into (in, regno, inmode)))
	      {
		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
		break;
	      }
	  }
    }

  if (out)
    output_reloadnum = i;
/* Record an additional place we must replace a value
   for which we have already recorded a reload.
   RELOADNUM is the value returned by push_reload
   when the reload was recorded.
   This is used in insn patterns that use match_dup.  */

static void
push_replacement (rtx *loc, int reloadnum, machine_mode mode)
{
  if (replace_reloads)
    {
      struct replacement *r = &replacements[n_replacements++];
      r->what = reloadnum;
      r->where = loc;
      r->mode = mode;
    }
}
/* Duplicate any replacement we have recorded to apply at
   location ORIG_LOC to also be performed at DUP_LOC.
   This is used in insn patterns that use match_dup.  */

static void
dup_replacements (rtx *dup_loc, rtx *orig_loc)
{
  int i, n = n_replacements;

  for (i = 0; i < n; i++)
    {
      struct replacement *r = &replacements[i];
      if (r->where == orig_loc)
	push_replacement (dup_loc, r->what, r->mode);
    }
}
/* Transfer all replacements that used to be in reload FROM to be in
   reload TO.  */

void
transfer_replacements (int to, int from)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    if (replacements[i].what == from)
      replacements[i].what = to;
}
/* IN_RTX is the value loaded by a reload that we now decided to inherit,
   or a subpart of it.  If we have any replacements registered for IN_RTX,
   cancel the reloads that were supposed to load them.
   Return nonzero if we canceled any reloads.  */

int
remove_address_replacements (rtx in_rtx)
{
  int i, j;
  char reload_flags[MAX_RELOADS];
  int something_changed = 0;

  memset (reload_flags, 0, sizeof reload_flags);
  for (i = 0, j = 0; i < n_replacements; i++)
    {
      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
	reload_flags[replacements[i].what] |= 1;
      else
	{
	  replacements[j++] = replacements[i];
	  reload_flags[replacements[i].what] |= 2;
	}
    }
  /* Note that the following store must be done before the recursive calls.  */
  n_replacements = j;

  for (i = n_reloads - 1; i >= 0; i--)
    {
      if (reload_flags[i] == 1)
	{
	  deallocate_reload_reg (i);
	  remove_address_replacements (rld[i].in);
	  something_changed = 1;
	}
    }
  return something_changed;
}
/* If there is only one output reload, and it is not for an earlyclobber
   operand, try to combine it with a (logically unrelated) input reload
   to reduce the number of reload registers needed.

   This is safe if the input reload does not appear in
   the value being output-reloaded, because this implies
   it is not needed any more once the original insn completes.

   If that doesn't work, see if we can use any of the registers that
   die in this insn as a reload register.  We can if it is of the right
   class and does not appear in the value being output-reloaded.  */
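/* For example, in an insn like (set (reg A) (plus (reg B) (const_int 4)))
   where B needs an input reload and A needs an output reload, one reload
   register can usually serve both: B's reloaded value is dead once the add
   has executed, so the same register can then receive the result stored
   back into A.  (Illustrative sketch only; the precise legality checks are
   the conditions tested below.)  */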
void
combine_reloads (void)
{
  int output_reload = -1;
  int secondary_out = -1;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
	if (output_reload >= 0)
	  return;
	output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	&& rld[i].opnum == rld[output_reload].opnum)
      return;
1803 /* Check each input reload; can we combine it? */
1805 for (i
= 0; i
< n_reloads
; i
++)
1806 if (rld
[i
].in
&& ! rld
[i
].optional
&& ! rld
[i
].nocombine
1807 /* Life span of this reload must not extend past main insn. */
1808 && rld
[i
].when_needed
!= RELOAD_FOR_OUTPUT_ADDRESS
1809 && rld
[i
].when_needed
!= RELOAD_FOR_OUTADDR_ADDRESS
1810 && rld
[i
].when_needed
!= RELOAD_OTHER
1811 && (ira_reg_class_max_nregs
[(int)rld
[i
].rclass
][(int) rld
[i
].inmode
]
1812 == ira_reg_class_max_nregs
[(int) rld
[output_reload
].rclass
]
1813 [(int) rld
[output_reload
].outmode
])
1815 && rld
[i
].reg_rtx
== 0
1816 #ifdef SECONDARY_MEMORY_NEEDED
1817 /* Don't combine two reloads with different secondary
1818 memory locations. */
1819 && (secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[i
].opnum
] == 0
1820 || secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
] == 0
1821 || rtx_equal_p (secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[i
].opnum
],
1822 secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
]))
1824 && (targetm
.small_register_classes_for_mode_p (VOIDmode
)
1825 ? (rld
[i
].rclass
== rld
[output_reload
].rclass
)
1826 : (reg_class_subset_p (rld
[i
].rclass
,
1827 rld
[output_reload
].rclass
)
1828 || reg_class_subset_p (rld
[output_reload
].rclass
,
1830 && (MATCHES (rld
[i
].in
, rld
[output_reload
].out
)
1831 /* Args reversed because the first arg seems to be
1832 the one that we imagine being modified
1833 while the second is the one that might be affected. */
1834 || (! reg_overlap_mentioned_for_reload_p (rld
[output_reload
].out
,
1836 /* However, if the input is a register that appears inside
1837 the output, then we also can't share.
1838 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1839 If the same reload reg is used for both reg 69 and the
1840 result to be stored in memory, then that result
1841 will clobber the address of the memory ref. */
1842 && ! (REG_P (rld
[i
].in
)
1843 && reg_overlap_mentioned_for_reload_p (rld
[i
].in
,
1844 rld
[output_reload
].out
))))
1845 && ! reload_inner_reg_of_subreg (rld
[i
].in
, rld
[i
].inmode
,
1846 rld
[i
].when_needed
!= RELOAD_FOR_INPUT
)
1847 && (reg_class_size
[(int) rld
[i
].rclass
]
1848 || targetm
.small_register_classes_for_mode_p (VOIDmode
))
1849 /* We will allow making things slightly worse by combining an
1850 input and an output, but no worse than that. */
1851 && (rld
[i
].when_needed
== RELOAD_FOR_INPUT
1852 || rld
[i
].when_needed
== RELOAD_FOR_OUTPUT
))
1856 /* We have found a reload to combine with! */
1857 rld
[i
].out
= rld
[output_reload
].out
;
1858 rld
[i
].out_reg
= rld
[output_reload
].out_reg
;
1859 rld
[i
].outmode
= rld
[output_reload
].outmode
;
1860 /* Mark the old output reload as inoperative. */
1861 rld
[output_reload
].out
= 0;
1862 /* The combined reload is needed for the entire insn. */
1863 rld
[i
].when_needed
= RELOAD_OTHER
;
1864 /* If the output reload had a secondary reload, copy it. */
1865 if (rld
[output_reload
].secondary_out_reload
!= -1)
1867 rld
[i
].secondary_out_reload
1868 = rld
[output_reload
].secondary_out_reload
;
1869 rld
[i
].secondary_out_icode
1870 = rld
[output_reload
].secondary_out_icode
;
1873 #ifdef SECONDARY_MEMORY_NEEDED
1874 /* Copy any secondary MEM. */
1875 if (secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
] != 0)
1876 secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[i
].opnum
]
1877 = secondary_memlocs_elim
[(int) rld
[output_reload
].outmode
][rld
[output_reload
].opnum
];
1879 /* If required, minimize the register class. */
1880 if (reg_class_subset_p (rld
[output_reload
].rclass
,
1882 rld
[i
].rclass
= rld
[output_reload
].rclass
;
1884 /* Transfer all replacements from the old reload to the combined. */
1885 for (j
= 0; j
< n_replacements
; j
++)
1886 if (replacements
[j
].what
== output_reload
)
1887 replacements
[j
].what
= i
;
  /* If this insn has only one operand that is modified or written (assumed
     to be the first), it must be the one corresponding to this reload.  It
     is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber).  If this is an asm insn, give up.  */

  if (INSN_CODE (this_insn) == -1)
    return;

  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
      return;
  /* See if some hard register that dies in this insn and is not used in
     the output is the right class.  Only works if the register we pick
     up can fully hold our output reload.  */
  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& REG_P (XEXP (note, 0))
	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
						rld[output_reload].out)
	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
			      regno)
	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
	/* Ensure that a secondary or tertiary reload for this output
	   won't want this register.  */
	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
	    || (!(TEST_HARD_REG_BIT
		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
		    || !(TEST_HARD_REG_BIT
			 (reg_class_contents[(int) rld[secondary_out].rclass],
			  regno)))))
	&& !fixed_regs[regno]
	/* Check that a former pseudo is valid; see find_dummy_reload.  */
	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       ORIGINAL_REGNO (XEXP (note, 0)))
		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
      {
	rld[output_reload].reg_rtx
	  = gen_rtx_REG (rld[output_reload].outmode, regno);
	return;
      }
/* Try to find a reload register for an in-out reload (expressions IN and OUT).
   See if one of IN and OUT is a register that may be used;
   this is desirable since a spill-register won't be needed.
   If so, return the register rtx that proves acceptable.

   INLOC and OUTLOC are locations where IN and OUT appear in the insn.
   RCLASS is the register class required for the reload.

   If FOR_REAL is >= 0, it is the number of the reload,
   and in some cases when it can be discovered that OUT doesn't need
   to be computed, clear out rld[FOR_REAL].out.

   If FOR_REAL is -1, this should not be done, because this call
   is just to see if a register can be found, not to find and install it.

   EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
   puts an additional constraint on being able to use IN for OUT since
   IN must not appear elsewhere in the insn (it is assumed that IN itself
   is safe from the earlyclobber).  */
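/* For instance, in a two-address-style insn where operand 0 is written and
   operand 1 is read, if operand 1 is a hard register that dies here and is
   not otherwise mentioned in the value being stored, that same register can
   usually serve as the "dummy" reload register for both, so no spill
   register needs to be allocated.  (Informal illustration of the checks
   made below.)  */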
static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
		   machine_mode inmode, machine_mode outmode,
		   reg_class_t rclass, int for_real, int earlyclobber)
1973 /* If operands exceed a word, we can't use either of them
1974 unless they have the same size. */
1975 if (GET_MODE_SIZE (outmode
) != GET_MODE_SIZE (inmode
)
1976 && (GET_MODE_SIZE (outmode
) > UNITS_PER_WORD
1977 || GET_MODE_SIZE (inmode
) > UNITS_PER_WORD
))
1980 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1981 respectively refers to a hard register. */
1983 /* Find the inside of any subregs. */
1984 while (GET_CODE (out
) == SUBREG
)
1986 if (REG_P (SUBREG_REG (out
))
1987 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
)
1988 out_offset
+= subreg_regno_offset (REGNO (SUBREG_REG (out
)),
1989 GET_MODE (SUBREG_REG (out
)),
1992 out
= SUBREG_REG (out
);
1994 while (GET_CODE (in
) == SUBREG
)
1996 if (REG_P (SUBREG_REG (in
))
1997 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
)
1998 in_offset
+= subreg_regno_offset (REGNO (SUBREG_REG (in
)),
1999 GET_MODE (SUBREG_REG (in
)),
2002 in
= SUBREG_REG (in
);
2005 /* Narrow down the reg class, the same way push_reload will;
2006 otherwise we might find a dummy now, but push_reload won't. */
2008 reg_class_t preferred_class
= targetm
.preferred_reload_class (in
, rclass
);
2009 if (preferred_class
!= NO_REGS
)
2010 rclass
= (enum reg_class
) preferred_class
;
2013 /* See if OUT will do. */
2015 && REGNO (out
) < FIRST_PSEUDO_REGISTER
)
2017 unsigned int regno
= REGNO (out
) + out_offset
;
2018 unsigned int nwords
= hard_regno_nregs
[regno
][outmode
];
2021 /* When we consider whether the insn uses OUT,
2022 ignore references within IN. They don't prevent us
2023 from copying IN into OUT, because those refs would
2024 move into the insn that reloads IN.
2026 However, we only ignore IN in its role as this reload.
2027 If the insn uses IN elsewhere and it contains OUT,
2028 that counts. We can't be sure it's the "same" operand
2029 so it might not go through this reload.
2031 We also need to avoid using OUT if it, or part of it, is a
2032 fixed register. Modifying such registers, even transiently,
2033 may have undefined effects on the machine, such as modifying
2034 the stack pointer. */
2036 *inloc
= const0_rtx
;
2038 if (regno
< FIRST_PSEUDO_REGISTER
2039 && HARD_REGNO_MODE_OK (regno
, outmode
)
2040 && ! refers_to_regno_for_reload_p (regno
, regno
+ nwords
,
2041 PATTERN (this_insn
), outloc
))
2045 for (i
= 0; i
< nwords
; i
++)
2046 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
2048 || fixed_regs
[regno
+ i
])
2053 if (REG_P (real_out
))
2056 value
= gen_rtx_REG (outmode
, regno
);
  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2069 if (hard_regs_live_known
2071 && REGNO (in
) < FIRST_PSEUDO_REGISTER
2073 || find_reg_note (this_insn
, REG_UNUSED
, real_out
))
2074 && find_reg_note (this_insn
, REG_DEAD
, real_in
)
2075 && !fixed_regs
[REGNO (in
)]
2076 && HARD_REGNO_MODE_OK (REGNO (in
),
2077 /* The only case where out and real_out might
2078 have different modes is where real_out
2079 is a subreg, and in that case, out
2081 (GET_MODE (out
) != VOIDmode
2082 ? GET_MODE (out
) : outmode
))
2083 && (ORIGINAL_REGNO (in
) < FIRST_PSEUDO_REGISTER
2084 /* However only do this if we can be sure that this input
2085 operand doesn't correspond with an uninitialized pseudo.
2086 global can assign some hardreg to it that is the same as
2087 the one assigned to a different, also live pseudo (as it
2088 can ignore the conflict). We must never introduce writes
2089 to such hardregs, as they would clobber the other live
2090 pseudo. See PR 20973. */
2091 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)),
2092 ORIGINAL_REGNO (in
))
2093 /* Similarly, only do this if we can be sure that the death
2094 note is still valid. global can assign some hardreg to
2095 the pseudo referenced in the note and simultaneously a
2096 subword of this hardreg to a different, also live pseudo,
2097 because only another subword of the hardreg is actually
2098 used in the insn. This cannot happen if the pseudo has
2099 been assigned exactly one hardreg. See PR 33732. */
2100 && hard_regno_nregs
[REGNO (in
)][GET_MODE (in
)] == 1)))
2102 unsigned int regno
= REGNO (in
) + in_offset
;
2103 unsigned int nwords
= hard_regno_nregs
[regno
][inmode
];
2105 if (! refers_to_regno_for_reload_p (regno
, regno
+ nwords
, out
, (rtx
*) 0)
2106 && ! hard_reg_set_here_p (regno
, regno
+ nwords
,
2107 PATTERN (this_insn
))
2109 || ! refers_to_regno_for_reload_p (regno
, regno
+ nwords
,
2110 PATTERN (this_insn
), inloc
)))
2114 for (i
= 0; i
< nwords
; i
++)
2115 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
2121 /* If we were going to use OUT as the reload reg
2122 and changed our mind, it means OUT is a dummy that
2123 dies here. So don't bother copying value to it. */
2124 if (for_real
>= 0 && value
== real_out
)
2125 rld
[for_real
].out
= 0;
2126 if (REG_P (real_in
))
2129 value
= gen_rtx_REG (inmode
, regno
);
2137 /* This page contains subroutines used mainly for determining
2138 whether the IN or an OUT of a reload can serve as the
2141 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2144 earlyclobber_operand_p (rtx x
)
2148 for (i
= 0; i
< n_earlyclobbers
; i
++)
2149 if (reload_earlyclobbers
[i
] == x
)
2155 /* Return 1 if expression X alters a hard reg in the range
2156 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2157 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2158 X should be the body of an instruction. */
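/* For example, with BEG_REGNO == 3 and END_REGNO == 5, a body containing
   (set (reg:DI 4) ...) alters the range, since a DImode value starting in
   hard reg 4 also occupies reg 5 on a machine where DImode needs two
   registers.  (Illustrative; the exact overlap is computed from the mode.)  */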
2161 hard_reg_set_here_p (unsigned int beg_regno
, unsigned int end_regno
, rtx x
)
2163 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
2165 rtx op0
= SET_DEST (x
);
2167 while (GET_CODE (op0
) == SUBREG
)
2168 op0
= SUBREG_REG (op0
);
2171 unsigned int r
= REGNO (op0
);
2173 /* See if this reg overlaps range under consideration. */
2175 && end_hard_regno (GET_MODE (op0
), r
) > beg_regno
)
2179 else if (GET_CODE (x
) == PARALLEL
)
2181 int i
= XVECLEN (x
, 0) - 1;
2184 if (hard_reg_set_here_p (beg_regno
, end_regno
, XVECEXP (x
, 0, i
)))
2191 /* Return 1 if ADDR is a valid memory address for mode MODE
2192 in address space AS, and check that each pseudo reg has the
2193 proper kind of hard reg. */
2196 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED
,
2197 rtx addr
, addr_space_t as
)
2199 #ifdef GO_IF_LEGITIMATE_ADDRESS
2200 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
2201 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
2207 return targetm
.addr_space
.legitimate_address_p (mode
, addr
, 1, as
);
2211 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2212 if they are the same hard reg, and has special hacks for
2213 autoincrement and autodecrement.
2214 This is specifically intended for find_reloads to use
2215 in determining whether two operands match.
2216 X is the operand whose number is the lower of the two.
2218 The value is 2 if Y contains a pre-increment that matches
2219 a non-incrementing address in X. */
2221 /* ??? To be completely correct, we should arrange to pass
2222 for X the output operand and for Y the input operand.
2223 For now, we assume that the output operand has the lower number
2224 because that is natural in (SET output (... input ...)). */
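/* For example, (reg:SI 3) and (subreg:SI (reg:DI 3) 0) are treated as
   matching here when both resolve to hard register 3, even though
   rtx_equal_p would say they differ.  (Informal illustration of the
   hard-reg/SUBREG case handled below.)  */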
2227 operands_match_p (rtx x
, rtx y
)
2230 RTX_CODE code
= GET_CODE (x
);
2236 if ((code
== REG
|| (code
== SUBREG
&& REG_P (SUBREG_REG (x
))))
2237 && (REG_P (y
) || (GET_CODE (y
) == SUBREG
2238 && REG_P (SUBREG_REG (y
)))))
2244 i
= REGNO (SUBREG_REG (x
));
2245 if (i
>= FIRST_PSEUDO_REGISTER
)
2247 i
+= subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2248 GET_MODE (SUBREG_REG (x
)),
2255 if (GET_CODE (y
) == SUBREG
)
2257 j
= REGNO (SUBREG_REG (y
));
2258 if (j
>= FIRST_PSEUDO_REGISTER
)
2260 j
+= subreg_regno_offset (REGNO (SUBREG_REG (y
)),
2261 GET_MODE (SUBREG_REG (y
)),
2268 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2269 multiple hard register group of scalar integer registers, so that
2270 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2272 if (REG_WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
2273 && SCALAR_INT_MODE_P (GET_MODE (x
))
2274 && i
< FIRST_PSEUDO_REGISTER
)
2275 i
+= hard_regno_nregs
[i
][GET_MODE (x
)] - 1;
2276 if (REG_WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (y
)) > UNITS_PER_WORD
2277 && SCALAR_INT_MODE_P (GET_MODE (y
))
2278 && j
< FIRST_PSEUDO_REGISTER
)
2279 j
+= hard_regno_nregs
[j
][GET_MODE (y
)] - 1;
2283 /* If two operands must match, because they are really a single
2284 operand of an assembler insn, then two postincrements are invalid
2285 because the assembler insn would increment only once.
2286 On the other hand, a postincrement matches ordinary indexing
2287 if the postincrement is the output operand. */
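  /* E.g. an output operand (mem:SI (post_inc (reg 1))) is considered to
     match an input operand (mem:SI (reg 1)); when the recursion reaches the
     addresses, the POST_INC is stripped and (reg 1) is compared with
     (reg 1).  */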
2288 if (code
== POST_DEC
|| code
== POST_INC
|| code
== POST_MODIFY
)
2289 return operands_match_p (XEXP (x
, 0), y
);
2290 /* Two preincrements are invalid
2291 because the assembler insn would increment only once.
2292 On the other hand, a preincrement matches ordinary indexing
2293 if the preincrement is the input operand.
2294 In this case, return 2, since some callers need to do special
2295 things when this happens. */
2296 if (GET_CODE (y
) == PRE_DEC
|| GET_CODE (y
) == PRE_INC
2297 || GET_CODE (y
) == PRE_MODIFY
)
2298 return operands_match_p (x
, XEXP (y
, 0)) ? 2 : 0;
2302 /* Now we have disposed of all the cases in which different rtx codes
2304 if (code
!= GET_CODE (y
))
2307 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2308 if (GET_MODE (x
) != GET_MODE (y
))
2311 /* MEMs referring to different address space are not equivalent. */
2312 if (code
== MEM
&& MEM_ADDR_SPACE (x
) != MEM_ADDR_SPACE (y
))
2321 return LABEL_REF_LABEL (x
) == LABEL_REF_LABEL (y
);
2323 return XSTR (x
, 0) == XSTR (y
, 0);
  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */
2333 fmt
= GET_RTX_FORMAT (code
);
2334 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2340 if (XWINT (x
, i
) != XWINT (y
, i
))
2345 if (XINT (x
, i
) != XINT (y
, i
))
2350 val
= operands_match_p (XEXP (x
, i
), XEXP (y
, i
));
2353 /* If any subexpression returns 2,
2354 we should return 2 if we are successful. */
2363 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2365 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; --j
)
2367 val
= operands_match_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
));
2375 /* It is believed that rtx's at this level will never
2376 contain anything but integers and other rtx's,
2377 except for within LABEL_REFs and SYMBOL_REFs. */
2382 return 1 + success_2
;
2385 /* Describe the range of registers or memory referenced by X.
2386 If X is a register, set REG_FLAG and put the first register
2387 number into START and the last plus one into END.
2388 If X is a memory reference, put a base address into BASE
2389 and a range of integer offsets into START and END.
2390 If X is pushing on the stack, we can assume it causes no trouble,
2391 so we set the SAFE field. */
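/* For example, (mem:SI (plus (reg fp) (const_int 8))) would be described by
   base = (reg fp), start = 8, end = 12 (8 plus the 4-byte size of SImode),
   while (reg:DI 6) would set the reg flag with start = 6 and end = 8 on a
   target where DImode occupies two hard registers.  (Illustrative values
   only.)  */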
2393 static struct decomposition
2396 struct decomposition val
;
2399 memset (&val
, 0, sizeof (val
));
2401 switch (GET_CODE (x
))
2405 rtx base
= NULL_RTX
, offset
= 0;
2406 rtx addr
= XEXP (x
, 0);
2408 if (GET_CODE (addr
) == PRE_DEC
|| GET_CODE (addr
) == PRE_INC
2409 || GET_CODE (addr
) == POST_DEC
|| GET_CODE (addr
) == POST_INC
)
2411 val
.base
= XEXP (addr
, 0);
2412 val
.start
= -GET_MODE_SIZE (GET_MODE (x
));
2413 val
.end
= GET_MODE_SIZE (GET_MODE (x
));
2414 val
.safe
= REGNO (val
.base
) == STACK_POINTER_REGNUM
;
2418 if (GET_CODE (addr
) == PRE_MODIFY
|| GET_CODE (addr
) == POST_MODIFY
)
2420 if (GET_CODE (XEXP (addr
, 1)) == PLUS
2421 && XEXP (addr
, 0) == XEXP (XEXP (addr
, 1), 0)
2422 && CONSTANT_P (XEXP (XEXP (addr
, 1), 1)))
2424 val
.base
= XEXP (addr
, 0);
2425 val
.start
= -INTVAL (XEXP (XEXP (addr
, 1), 1));
2426 val
.end
= INTVAL (XEXP (XEXP (addr
, 1), 1));
2427 val
.safe
= REGNO (val
.base
) == STACK_POINTER_REGNUM
;
2432 if (GET_CODE (addr
) == CONST
)
2434 addr
= XEXP (addr
, 0);
2437 if (GET_CODE (addr
) == PLUS
)
2439 if (CONSTANT_P (XEXP (addr
, 0)))
2441 base
= XEXP (addr
, 1);
2442 offset
= XEXP (addr
, 0);
2444 else if (CONSTANT_P (XEXP (addr
, 1)))
2446 base
= XEXP (addr
, 0);
2447 offset
= XEXP (addr
, 1);
2454 offset
= const0_rtx
;
2456 if (GET_CODE (offset
) == CONST
)
2457 offset
= XEXP (offset
, 0);
2458 if (GET_CODE (offset
) == PLUS
)
2460 if (CONST_INT_P (XEXP (offset
, 0)))
2462 base
= gen_rtx_PLUS (GET_MODE (base
), base
, XEXP (offset
, 1));
2463 offset
= XEXP (offset
, 0);
2465 else if (CONST_INT_P (XEXP (offset
, 1)))
2467 base
= gen_rtx_PLUS (GET_MODE (base
), base
, XEXP (offset
, 0));
2468 offset
= XEXP (offset
, 1);
2472 base
= gen_rtx_PLUS (GET_MODE (base
), base
, offset
);
2473 offset
= const0_rtx
;
2476 else if (!CONST_INT_P (offset
))
2478 base
= gen_rtx_PLUS (GET_MODE (base
), base
, offset
);
2479 offset
= const0_rtx
;
2482 if (all_const
&& GET_CODE (base
) == PLUS
)
2483 base
= gen_rtx_CONST (GET_MODE (base
), base
);
2485 gcc_assert (CONST_INT_P (offset
));
2487 val
.start
= INTVAL (offset
);
2488 val
.end
= val
.start
+ GET_MODE_SIZE (GET_MODE (x
));
2495 val
.start
= true_regnum (x
);
2496 if (val
.start
< 0 || val
.start
>= FIRST_PSEUDO_REGISTER
)
2498 /* A pseudo with no hard reg. */
2499 val
.start
= REGNO (x
);
2500 val
.end
= val
.start
+ 1;
2504 val
.end
= end_hard_regno (GET_MODE (x
), val
.start
);
2508 if (!REG_P (SUBREG_REG (x
)))
2509 /* This could be more precise, but it's good enough. */
2510 return decompose (SUBREG_REG (x
));
2512 val
.start
= true_regnum (x
);
2513 if (val
.start
< 0 || val
.start
>= FIRST_PSEUDO_REGISTER
)
2514 return decompose (SUBREG_REG (x
));
2517 val
.end
= val
.start
+ subreg_nregs (x
);
2521 /* This hasn't been assigned yet, so it can't conflict yet. */
2526 gcc_assert (CONSTANT_P (x
));
2533 /* Return 1 if altering Y will not modify the value of X.
2534 Y is also described by YDATA, which should be decompose (Y). */
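/* For example, two stack-slot MEMs with the same base register but
   non-overlapping offset ranges (say bytes 8..12 and 16..20) are immune to
   each other, whereas a MEM with a variable base must conservatively be
   assumed to overlap anything else in memory.  */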
2537 immune_p (rtx x
, rtx y
, struct decomposition ydata
)
2539 struct decomposition xdata
;
2542 return !refers_to_regno_for_reload_p (ydata
.start
, ydata
.end
, x
, (rtx
*) 0);
2546 gcc_assert (MEM_P (y
));
2547 /* If Y is memory and X is not, Y can't affect X. */
2551 xdata
= decompose (x
);
2553 if (! rtx_equal_p (xdata
.base
, ydata
.base
))
2555 /* If bases are distinct symbolic constants, there is no overlap. */
2556 if (CONSTANT_P (xdata
.base
) && CONSTANT_P (ydata
.base
))
2558 /* Constants and stack slots never overlap. */
2559 if (CONSTANT_P (xdata
.base
)
2560 && (ydata
.base
== frame_pointer_rtx
2561 || ydata
.base
== hard_frame_pointer_rtx
2562 || ydata
.base
== stack_pointer_rtx
))
2564 if (CONSTANT_P (ydata
.base
)
2565 && (xdata
.base
== frame_pointer_rtx
2566 || xdata
.base
== hard_frame_pointer_rtx
2567 || xdata
.base
== stack_pointer_rtx
))
2569 /* If either base is variable, we don't know anything. */
2573 return (xdata
.start
>= ydata
.end
|| ydata
.start
>= xdata
.end
);
2576 /* Similar, but calls decompose. */
2579 safe_from_earlyclobber (rtx op
, rtx clobber
)
2581 struct decomposition early_data
;
2583 early_data
= decompose (clobber
);
2584 return immune_p (op
, clobber
, early_data
);
2587 /* Main entry point of this file: search the body of INSN
2588 for values that need reloading and record them with push_reload.
2589 REPLACE nonzero means record also where the values occur
2590 so that subst_reloads can be used.
   IND_LEVELS says how many levels of indirection are supported by this
   machine; a value of zero means that a memory reference is not a valid
   memory address.
2596 LIVE_KNOWN says we have valid information about which hard
2597 regs are live at each point in the program; this is true when
2598 we are called from global_alloc but false when stupid register
2599 allocation has been done.
2601 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2602 which is nonnegative if the reg has been commandeered for reloading into.
2603 It is copied into STATIC_RELOAD_REG_P and referenced from there
2604 by various subroutines.
2606 Return TRUE if some operands need to be changed, because of swapping
2607 commutative operands, reg_equiv_address substitution, or whatever. */
2610 find_reloads (rtx_insn
*insn
, int replace
, int ind_levels
, int live_known
,
2611 short *reload_reg_p
)
2613 int insn_code_number
;
2616 /* These start out as the constraints for the insn
2617 and they are chewed up as we consider alternatives. */
2618 const char *constraints
[MAX_RECOG_OPERANDS
];
2619 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2621 enum reg_class preferred_class
[MAX_RECOG_OPERANDS
];
2622 char pref_or_nothing
[MAX_RECOG_OPERANDS
];
2623 /* Nonzero for a MEM operand whose entire address needs a reload.
2624 May be -1 to indicate the entire address may or may not need a reload. */
2625 int address_reloaded
[MAX_RECOG_OPERANDS
];
2626 /* Nonzero for an address operand that needs to be completely reloaded.
2627 May be -1 to indicate the entire operand may or may not need a reload. */
2628 int address_operand_reloaded
[MAX_RECOG_OPERANDS
];
2629 /* Value of enum reload_type to use for operand. */
2630 enum reload_type operand_type
[MAX_RECOG_OPERANDS
];
2631 /* Value of enum reload_type to use within address of operand. */
2632 enum reload_type address_type
[MAX_RECOG_OPERANDS
];
2633 /* Save the usage of each operand. */
2634 enum reload_usage
{ RELOAD_READ
, RELOAD_READ_WRITE
, RELOAD_WRITE
} modified
[MAX_RECOG_OPERANDS
];
2635 int no_input_reloads
= 0, no_output_reloads
= 0;
2637 reg_class_t this_alternative
[MAX_RECOG_OPERANDS
];
2638 char this_alternative_match_win
[MAX_RECOG_OPERANDS
];
2639 char this_alternative_win
[MAX_RECOG_OPERANDS
];
2640 char this_alternative_offmemok
[MAX_RECOG_OPERANDS
];
2641 char this_alternative_earlyclobber
[MAX_RECOG_OPERANDS
];
2642 int this_alternative_matches
[MAX_RECOG_OPERANDS
];
2643 reg_class_t goal_alternative
[MAX_RECOG_OPERANDS
];
2644 int this_alternative_number
;
2645 int goal_alternative_number
= 0;
2646 int operand_reloadnum
[MAX_RECOG_OPERANDS
];
2647 int goal_alternative_matches
[MAX_RECOG_OPERANDS
];
2648 int goal_alternative_matched
[MAX_RECOG_OPERANDS
];
2649 char goal_alternative_match_win
[MAX_RECOG_OPERANDS
];
2650 char goal_alternative_win
[MAX_RECOG_OPERANDS
];
2651 char goal_alternative_offmemok
[MAX_RECOG_OPERANDS
];
2652 char goal_alternative_earlyclobber
[MAX_RECOG_OPERANDS
];
2653 int goal_alternative_swapped
;
2656 char operands_match
[MAX_RECOG_OPERANDS
][MAX_RECOG_OPERANDS
];
2657 rtx substed_operand
[MAX_RECOG_OPERANDS
];
2658 rtx body
= PATTERN (insn
);
2659 rtx set
= single_set (insn
);
2660 int goal_earlyclobber
= 0, this_earlyclobber
;
2661 machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
2667 n_earlyclobbers
= 0;
2668 replace_reloads
= replace
;
2669 hard_regs_live_known
= live_known
;
2670 static_reload_reg_p
= reload_reg_p
;
2672 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2673 neither are insns that SET cc0. Insns that use CC0 are not allowed
2674 to have any input reloads. */
2675 if (JUMP_P (insn
) || CALL_P (insn
))
2676 no_output_reloads
= 1;
2678 if (HAVE_cc0
&& reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
2679 no_input_reloads
= 1;
2680 if (HAVE_cc0
&& reg_set_p (cc0_rtx
, PATTERN (insn
)))
2681 no_output_reloads
= 1;
2683 #ifdef SECONDARY_MEMORY_NEEDED
2684 /* The eliminated forms of any secondary memory locations are per-insn, so
2685 clear them out here. */
2687 if (secondary_memlocs_elim_used
)
2689 memset (secondary_memlocs_elim
, 0,
2690 sizeof (secondary_memlocs_elim
[0]) * secondary_memlocs_elim_used
);
2691 secondary_memlocs_elim_used
= 0;
2695 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2696 is cheap to move between them. If it is not, there may not be an insn
2697 to do the copy, so we may need a reload. */
2698 if (GET_CODE (body
) == SET
2699 && REG_P (SET_DEST (body
))
2700 && REGNO (SET_DEST (body
)) < FIRST_PSEUDO_REGISTER
2701 && REG_P (SET_SRC (body
))
2702 && REGNO (SET_SRC (body
)) < FIRST_PSEUDO_REGISTER
2703 && register_move_cost (GET_MODE (SET_SRC (body
)),
2704 REGNO_REG_CLASS (REGNO (SET_SRC (body
))),
2705 REGNO_REG_CLASS (REGNO (SET_DEST (body
)))) == 2)
2708 extract_insn (insn
);
2710 noperands
= reload_n_operands
= recog_data
.n_operands
;
2711 n_alternatives
= recog_data
.n_alternatives
;
2713 /* Just return "no reloads" if insn has no operands with constraints. */
2714 if (noperands
== 0 || n_alternatives
== 0)
2717 insn_code_number
= INSN_CODE (insn
);
2718 this_insn_is_asm
= insn_code_number
< 0;
2720 memcpy (operand_mode
, recog_data
.operand_mode
,
2721 noperands
* sizeof (machine_mode
));
2722 memcpy (constraints
, recog_data
.constraints
,
2723 noperands
* sizeof (const char *));
2727 /* If we will need to know, later, whether some pair of operands
2728 are the same, we must compare them now and save the result.
2729 Reloading the base and index registers will clobber them
2730 and afterward they will fail to match. */
2732 for (i
= 0; i
< noperands
; i
++)
2738 substed_operand
[i
] = recog_data
.operand
[i
];
2741 modified
[i
] = RELOAD_READ
;
2743 /* Scan this operand's constraint to see if it is an output operand,
2744 an in-out operand, is commutative, or should match another. */
2748 p
+= CONSTRAINT_LEN (c
, p
);
2752 modified
[i
] = RELOAD_WRITE
;
2755 modified
[i
] = RELOAD_READ_WRITE
;
2759 /* The last operand should not be marked commutative. */
2760 gcc_assert (i
!= noperands
- 1);
2762 /* We currently only support one commutative pair of
2763 operands. Some existing asm code currently uses more
2764 than one pair. Previously, that would usually work,
2765 but sometimes it would crash the compiler. We
2766 continue supporting that case as well as we can by
2767 silently ignoring all but the first pair. In the
2768 future we may handle it correctly. */
2769 if (commutative
< 0)
2772 gcc_assert (this_insn_is_asm
);
2775 /* Use of ISDIGIT is tempting here, but it may get expensive because
2776 of locale support we don't want. */
2777 case '0': case '1': case '2': case '3': case '4':
2778 case '5': case '6': case '7': case '8': case '9':
2780 c
= strtoul (p
- 1, &end
, 10);
2783 operands_match
[c
][i
]
2784 = operands_match_p (recog_data
.operand
[c
],
2785 recog_data
.operand
[i
]);
2787 /* An operand may not match itself. */
2788 gcc_assert (c
!= i
);
2790 /* If C can be commuted with C+1, and C might need to match I,
2791 then C+1 might also need to match I. */
2792 if (commutative
>= 0)
2794 if (c
== commutative
|| c
== commutative
+ 1)
2796 int other
= c
+ (c
== commutative
? 1 : -1);
2797 operands_match
[other
][i
]
2798 = operands_match_p (recog_data
.operand
[other
],
2799 recog_data
.operand
[i
]);
2801 if (i
== commutative
|| i
== commutative
+ 1)
2803 int other
= i
+ (i
== commutative
? 1 : -1);
2804 operands_match
[c
][other
]
2805 = operands_match_p (recog_data
.operand
[c
],
2806 recog_data
.operand
[other
]);
2808 /* Note that C is supposed to be less than I.
2809 No need to consider altering both C and I because in
2810 that case we would alter one into the other. */
2817 /* Examine each operand that is a memory reference or memory address
2818 and reload parts of the addresses into index registers.
2819 Also here any references to pseudo regs that didn't get hard regs
2820 but are equivalent to constants get replaced in the insn itself
2821 with those constants. Nobody will ever see them again.
2823 Finally, set up the preferred classes of each operand. */
2825 for (i
= 0; i
< noperands
; i
++)
2827 RTX_CODE code
= GET_CODE (recog_data
.operand
[i
]);
2829 address_reloaded
[i
] = 0;
2830 address_operand_reloaded
[i
] = 0;
2831 operand_type
[i
] = (modified
[i
] == RELOAD_READ
? RELOAD_FOR_INPUT
2832 : modified
[i
] == RELOAD_WRITE
? RELOAD_FOR_OUTPUT
2835 = (modified
[i
] == RELOAD_READ
? RELOAD_FOR_INPUT_ADDRESS
2836 : modified
[i
] == RELOAD_WRITE
? RELOAD_FOR_OUTPUT_ADDRESS
2839 if (*constraints
[i
] == 0)
2840 /* Ignore things like match_operator operands. */
2842 else if (insn_extra_address_constraint
2843 (lookup_constraint (constraints
[i
])))
2845 address_operand_reloaded
[i
]
2846 = find_reloads_address (recog_data
.operand_mode
[i
], (rtx
*) 0,
2847 recog_data
.operand
[i
],
2848 recog_data
.operand_loc
[i
],
2849 i
, operand_type
[i
], ind_levels
, insn
);
2851 /* If we now have a simple operand where we used to have a
2852 PLUS or MULT, re-recognize and try again. */
2853 if ((OBJECT_P (*recog_data
.operand_loc
[i
])
2854 || GET_CODE (*recog_data
.operand_loc
[i
]) == SUBREG
)
2855 && (GET_CODE (recog_data
.operand
[i
]) == MULT
2856 || GET_CODE (recog_data
.operand
[i
]) == PLUS
))
2858 INSN_CODE (insn
) = -1;
2859 retval
= find_reloads (insn
, replace
, ind_levels
, live_known
,
2864 recog_data
.operand
[i
] = *recog_data
.operand_loc
[i
];
2865 substed_operand
[i
] = recog_data
.operand
[i
];
2867 /* Address operands are reloaded in their existing mode,
2868 no matter what is specified in the machine description. */
2869 operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2871 /* If the address is a single CONST_INT pick address mode
2872 instead otherwise we will later not know in which mode
2873 the reload should be performed. */
2874 if (operand_mode
[i
] == VOIDmode
)
2875 operand_mode
[i
] = Pmode
;
2878 else if (code
== MEM
)
2881 = find_reloads_address (GET_MODE (recog_data
.operand
[i
]),
2882 recog_data
.operand_loc
[i
],
2883 XEXP (recog_data
.operand
[i
], 0),
2884 &XEXP (recog_data
.operand
[i
], 0),
2885 i
, address_type
[i
], ind_levels
, insn
);
2886 recog_data
.operand
[i
] = *recog_data
.operand_loc
[i
];
2887 substed_operand
[i
] = recog_data
.operand
[i
];
2889 else if (code
== SUBREG
)
2891 rtx reg
= SUBREG_REG (recog_data
.operand
[i
]);
2893 = find_reloads_toplev (recog_data
.operand
[i
], i
, address_type
[i
],
2896 && &SET_DEST (set
) == recog_data
.operand_loc
[i
],
2898 &address_reloaded
[i
]);
2900 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2901 that didn't get a hard register, emit a USE with a REG_EQUAL
2902 note in front so that we might inherit a previous, possibly
2908 && (GET_MODE_SIZE (GET_MODE (reg
))
2909 >= GET_MODE_SIZE (GET_MODE (op
)))
2910 && reg_equiv_constant (REGNO (reg
)) == 0)
2911 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode
, reg
),
2913 REG_EQUAL
, reg_equiv_memory_loc (REGNO (reg
)));
2915 substed_operand
[i
] = recog_data
.operand
[i
] = op
;
2917 else if (code
== PLUS
|| GET_RTX_CLASS (code
) == RTX_UNARY
)
2918 /* We can get a PLUS as an "operand" as a result of register
2919 elimination. See eliminate_regs and gen_reload. We handle
2920 a unary operator by reloading the operand. */
2921 substed_operand
[i
] = recog_data
.operand
[i
]
2922 = find_reloads_toplev (recog_data
.operand
[i
], i
, address_type
[i
],
2923 ind_levels
, 0, insn
,
2924 &address_reloaded
[i
]);
2925 else if (code
== REG
)
2927 /* This is equivalent to calling find_reloads_toplev.
2928 The code is duplicated for speed.
2929 When we find a pseudo always equivalent to a constant,
2930 we replace it by the constant. We must be sure, however,
2931 that we don't try to replace it in the insn in which it
2933 int regno
= REGNO (recog_data
.operand
[i
]);
2934 if (reg_equiv_constant (regno
) != 0
2935 && (set
== 0 || &SET_DEST (set
) != recog_data
.operand_loc
[i
]))
2937 /* Record the existing mode so that the check if constants are
2938 allowed will work when operand_mode isn't specified. */
2940 if (operand_mode
[i
] == VOIDmode
)
2941 operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2943 substed_operand
[i
] = recog_data
.operand
[i
]
2944 = reg_equiv_constant (regno
);
2946 if (reg_equiv_memory_loc (regno
) != 0
2947 && (reg_equiv_address (regno
) != 0 || num_not_at_initial_offset
))
2948 /* We need not give a valid is_set_dest argument since the case
2949 of a constant equivalence was checked above. */
2950 substed_operand
[i
] = recog_data
.operand
[i
]
2951 = find_reloads_toplev (recog_data
.operand
[i
], i
, address_type
[i
],
2952 ind_levels
, 0, insn
,
2953 &address_reloaded
[i
]);
2955 /* If the operand is still a register (we didn't replace it with an
2956 equivalent), get the preferred class to reload it into. */
2957 code
= GET_CODE (recog_data
.operand
[i
]);
2959 = ((code
== REG
&& REGNO (recog_data
.operand
[i
])
2960 >= FIRST_PSEUDO_REGISTER
)
2961 ? reg_preferred_class (REGNO (recog_data
.operand
[i
]))
2965 && REGNO (recog_data
.operand
[i
]) >= FIRST_PSEUDO_REGISTER
2966 && reg_alternate_class (REGNO (recog_data
.operand
[i
])) == NO_REGS
);
2969 /* If this is simply a copy from operand 1 to operand 0, merge the
2970 preferred classes for the operands. */
2971 if (set
!= 0 && noperands
>= 2 && recog_data
.operand
[0] == SET_DEST (set
)
2972 && recog_data
.operand
[1] == SET_SRC (set
))
2974 preferred_class
[0] = preferred_class
[1]
2975 = reg_class_subunion
[(int) preferred_class
[0]][(int) preferred_class
[1]];
2976 pref_or_nothing
[0] |= pref_or_nothing
[1];
2977 pref_or_nothing
[1] |= pref_or_nothing
[0];
2980 /* Now see what we need for pseudo-regs that didn't get hard regs
2981 or got the wrong kind of hard reg. For this, we must consider
2982 all the operands together against the register constraints. */
2984 best
= MAX_RECOG_OPERANDS
* 2 + 600;
2986 goal_alternative_swapped
= 0;
2988 /* The constraints are made of several alternatives.
2989 Each operand's constraint looks like foo,bar,... with commas
2990 separating the alternatives. The first alternatives for all
2991 operands go together, the second alternatives go together, etc.
2993 First loop over alternatives. */
2995 alternative_mask enabled
= get_enabled_alternatives (insn
);
2996 for (this_alternative_number
= 0;
2997 this_alternative_number
< n_alternatives
;
2998 this_alternative_number
++)
3002 if (!TEST_BIT (enabled
, this_alternative_number
))
3006 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3007 constraints
[i
] = skip_alternative (constraints
[i
]);
3012 /* If insn is commutative (it's safe to exchange a certain pair
3013 of operands) then we need to try each alternative twice, the
3014 second time matching those two operands as if we had
3015 exchanged them. To do this, really exchange them in
3017 for (swapped
= 0; swapped
< (commutative
>= 0 ? 2 : 1); swapped
++)
3019 /* Loop over operands for one constraint alternative. */
3020 /* LOSERS counts those that don't fit this alternative
3021 and would require loading. */
      /* BAD is set to 1 if some operand can't fit this alternative
	 even after reloading.  */
3026 /* REJECT is a count of how undesirable this alternative says it is
3027 if any reloading is required. If the alternative matches exactly
3028 then REJECT is ignored, but otherwise it gets this much
3029 counted against it in addition to the reloading needed. Each
3030 ? counts three times here since we want the disparaging caused by
3031 a bad register class to only count 1/3 as much. */
3036 recog_data
.operand
[commutative
] = substed_operand
[commutative
+ 1];
3037 recog_data
.operand
[commutative
+ 1] = substed_operand
[commutative
];
3038 /* Swap the duplicates too. */
3039 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3040 if (recog_data
.dup_num
[i
] == commutative
3041 || recog_data
.dup_num
[i
] == commutative
+ 1)
3042 *recog_data
.dup_loc
[i
]
3043 = recog_data
.operand
[(int) recog_data
.dup_num
[i
]];
3045 std::swap (preferred_class
[commutative
],
3046 preferred_class
[commutative
+ 1]);
3047 std::swap (pref_or_nothing
[commutative
],
3048 pref_or_nothing
[commutative
+ 1]);
3049 std::swap (address_reloaded
[commutative
],
3050 address_reloaded
[commutative
+ 1]);
3053 this_earlyclobber
= 0;
3055 for (i
= 0; i
< noperands
; i
++)
3057 const char *p
= constraints
[i
];
3062 /* 0 => this operand can be reloaded somehow for this alternative. */
3064 /* 0 => this operand can be reloaded if the alternative allows regs. */
3068 rtx operand
= recog_data
.operand
[i
];
3070 /* Nonzero means this is a MEM that must be reloaded into a reg
3071 regardless of what the constraint says. */
3072 int force_reload
= 0;
3074 /* Nonzero if a constant forced into memory would be OK for this
3077 int earlyclobber
= 0;
3078 enum constraint_num cn
;
3081 /* If the predicate accepts a unary operator, it means that
3082 we need to reload the operand, but do not do this for
3083 match_operator and friends. */
3084 if (UNARY_P (operand
) && *p
!= 0)
3085 operand
= XEXP (operand
, 0);
3087 /* If the operand is a SUBREG, extract
3088 the REG or MEM (or maybe even a constant) within.
3089 (Constants can occur as a result of reg_equiv_constant.) */
3091 while (GET_CODE (operand
) == SUBREG
)
3093 /* Offset only matters when operand is a REG and
3094 it is a hard reg. This is because it is passed
3095 to reg_fits_class_p if it is a REG and all pseudos
3096 return 0 from that function. */
3097 if (REG_P (SUBREG_REG (operand
))
3098 && REGNO (SUBREG_REG (operand
)) < FIRST_PSEUDO_REGISTER
)
3100 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand
)),
3101 GET_MODE (SUBREG_REG (operand
)),
3102 SUBREG_BYTE (operand
),
3103 GET_MODE (operand
)) < 0)
3105 offset
+= subreg_regno_offset (REGNO (SUBREG_REG (operand
)),
3106 GET_MODE (SUBREG_REG (operand
)),
3107 SUBREG_BYTE (operand
),
3108 GET_MODE (operand
));
3110 operand
= SUBREG_REG (operand
);
3111 /* Force reload if this is a constant or PLUS or if there may
3112 be a problem accessing OPERAND in the outer mode. */
3113 if (CONSTANT_P (operand
)
3114 || GET_CODE (operand
) == PLUS
3115 /* We must force a reload of paradoxical SUBREGs
3116 of a MEM because the alignment of the inner value
3117 may not be enough to do the outer reference. On
3118 big-endian machines, it may also reference outside
3121 On machines that extend byte operations and we have a
3122 SUBREG where both the inner and outer modes are no wider
3123 than a word and the inner mode is narrower, is integral,
3124 and gets extended when loaded from memory, combine.c has
3125 made assumptions about the behavior of the machine in such
3126 register access. If the data is, in fact, in memory we
3127 must always load using the size assumed to be in the
3128 register and let the insn do the different-sized
3131 This is doubly true if WORD_REGISTER_OPERATIONS. In
3132 this case eliminate_regs has left non-paradoxical
3133 subregs for push_reload to see. Make sure it does
3134 by forcing the reload.
3136 ??? When is it right at this stage to have a subreg
3137 of a mem that is _not_ to be handled specially? IMO
3138 those should have been reduced to just a mem. */
3139 || ((MEM_P (operand
)
3141 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
))
3142 #if !WORD_REGISTER_OPERATIONS
3143 && (((GET_MODE_BITSIZE (GET_MODE (operand
))
3144 < BIGGEST_ALIGNMENT
)
3145 && (GET_MODE_SIZE (operand_mode
[i
])
3146 > GET_MODE_SIZE (GET_MODE (operand
))))
3148 #ifdef LOAD_EXTEND_OP
3149 || (GET_MODE_SIZE (operand_mode
[i
]) <= UNITS_PER_WORD
3150 && (GET_MODE_SIZE (GET_MODE (operand
))
3152 && (GET_MODE_SIZE (operand_mode
[i
])
3153 > GET_MODE_SIZE (GET_MODE (operand
)))
3154 && INTEGRAL_MODE_P (GET_MODE (operand
))
3155 && LOAD_EXTEND_OP (GET_MODE (operand
)) != UNKNOWN
)
3164 this_alternative
[i
] = NO_REGS
;
3165 this_alternative_win
[i
] = 0;
3166 this_alternative_match_win
[i
] = 0;
3167 this_alternative_offmemok
[i
] = 0;
3168 this_alternative_earlyclobber
[i
] = 0;
3169 this_alternative_matches
[i
] = -1;
3171 /* An empty constraint or empty alternative
3172 allows anything which matched the pattern. */
3173 if (*p
== 0 || *p
== ',')
3176 /* Scan this alternative's specs for this operand;
3177 set WIN if the operand fits any letter in this alternative.
3178 Otherwise, clear BADOP if this operand could
3179 fit some letter after reloads,
3180 or set WINREG if this operand could fit after reloads
3181 provided the constraint allows some registers. */
3184 switch ((c
= *p
, len
= CONSTRAINT_LEN (c
, p
)), c
)
3202 /* Ignore rest of this alternative as far as
3203 reloading is concerned. */
3206 while (*p
&& *p
!= ',');
3210 case '0': case '1': case '2': case '3': case '4':
3211 case '5': case '6': case '7': case '8': case '9':
3212 m
= strtoul (p
, &end
, 10);
3216 this_alternative_matches
[i
] = m
;
3217 /* We are supposed to match a previous operand.
3218 If we do, we win if that one did.
3219 If we do not, count both of the operands as losers.
3220 (This is too conservative, since most of the time
3221 only a single reload insn will be needed to make
3222 the two operands win. As a result, this alternative
3223 may be rejected when it is actually desirable.) */
3224 if ((swapped
&& (m
!= commutative
|| i
!= commutative
+ 1))
3225 /* If we are matching as if two operands were swapped,
3226 also pretend that operands_match had been computed
3228 But if I is the second of those and C is the first,
3229 don't exchange them, because operands_match is valid
3230 only on one side of its diagonal. */
3232 [(m
== commutative
|| m
== commutative
+ 1)
3233 ? 2 * commutative
+ 1 - m
: m
]
3234 [(i
== commutative
|| i
== commutative
+ 1)
3235 ? 2 * commutative
+ 1 - i
: i
])
3236 : operands_match
[m
][i
])
3238 /* If we are matching a non-offsettable address where an
3239 offsettable address was expected, then we must reject
3240 this combination, because we can't reload it. */
3241 if (this_alternative_offmemok
[m
]
3242 && MEM_P (recog_data
.operand
[m
])
3243 && this_alternative
[m
] == NO_REGS
3244 && ! this_alternative_win
[m
])
3247 did_match
= this_alternative_win
[m
];
3251 /* Operands don't match. */
3254 /* Retroactively mark the operand we had to match
3255 as a loser, if it wasn't already. */
3256 if (this_alternative_win
[m
])
3258 this_alternative_win
[m
] = 0;
3259 if (this_alternative
[m
] == NO_REGS
)
3261 /* But count the pair only once in the total badness of
3262 this alternative, if the pair can be a dummy reload.
3263 The pointers in operand_loc are not swapped; swap
3264 them by hand if necessary. */
3265 if (swapped
&& i
== commutative
)
3266 loc1
= commutative
+ 1;
3267 else if (swapped
&& i
== commutative
+ 1)
3271 if (swapped
&& m
== commutative
)
3272 loc2
= commutative
+ 1;
3273 else if (swapped
&& m
== commutative
+ 1)
3278 = find_dummy_reload (recog_data
.operand
[i
],
3279 recog_data
.operand
[m
],
3280 recog_data
.operand_loc
[loc1
],
3281 recog_data
.operand_loc
[loc2
],
3282 operand_mode
[i
], operand_mode
[m
],
3283 this_alternative
[m
], -1,
3284 this_alternative_earlyclobber
[m
]);
3289 /* This can be fixed with reloads if the operand
3290 we are supposed to match can be fixed with reloads. */
3292 this_alternative
[i
] = this_alternative
[m
];
3294 /* If we have to reload this operand and some previous
3295 operand also had to match the same thing as this
3296 operand, we don't know how to do that. So reject this
3298 if (! did_match
|| force_reload
)
3299 for (j
= 0; j
< i
; j
++)
3300 if (this_alternative_matches
[j
]
3301 == this_alternative_matches
[i
])
3309 /* All necessary reloads for an address_operand
3310 were handled in find_reloads_address. */
3312 = base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
3318 case TARGET_MEM_CONSTRAINT
:
3323 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3324 && reg_renumber
[REGNO (operand
)] < 0))
3326 if (CONST_POOL_OK_P (operand_mode
[i
], operand
))
3333 && ! address_reloaded
[i
]
3334 && (GET_CODE (XEXP (operand
, 0)) == PRE_DEC
3335 || GET_CODE (XEXP (operand
, 0)) == POST_DEC
))
3341 && ! address_reloaded
[i
]
3342 && (GET_CODE (XEXP (operand
, 0)) == PRE_INC
3343 || GET_CODE (XEXP (operand
, 0)) == POST_INC
))
3347 /* Memory operand whose address is not offsettable. */
3352 && ! (ind_levels
? offsettable_memref_p (operand
)
3353 : offsettable_nonstrict_memref_p (operand
))
3354 /* Certain mem addresses will become offsettable
3355 after they themselves are reloaded. This is important;
3356 we don't want our own handling of unoffsettables
3357 to override the handling of reg_equiv_address. */
3358 && !(REG_P (XEXP (operand
, 0))
3360 || reg_equiv_address (REGNO (XEXP (operand
, 0))) != 0)))
3364 /* Memory operand whose address is offsettable. */
3368 if ((MEM_P (operand
)
3369 /* If IND_LEVELS, find_reloads_address won't reload a
3370 pseudo that didn't get a hard reg, so we have to
3371 reject that case. */
3372 && ((ind_levels
? offsettable_memref_p (operand
)
3373 : offsettable_nonstrict_memref_p (operand
))
3374 /* A reloaded address is offsettable because it is now
3375 just a simple register indirect. */
3376 || address_reloaded
[i
] == 1))
3378 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3379 && reg_renumber
[REGNO (operand
)] < 0
3380 /* If reg_equiv_address is nonzero, we will be
3381 loading it into a register; hence it will be
3382 offsettable, but we cannot say that reg_equiv_mem
3383 is offsettable without checking. */
3384 && ((reg_equiv_mem (REGNO (operand
)) != 0
3385 && offsettable_memref_p (reg_equiv_mem (REGNO (operand
))))
3386 || (reg_equiv_address (REGNO (operand
)) != 0))))
3388 if (CONST_POOL_OK_P (operand_mode
[i
], operand
)
3396 /* Output operand that is stored before the need for the
3397 input operands (and their index registers) is over. */
3398 earlyclobber
= 1, this_earlyclobber
= 1;
3408 /* A PLUS is never a valid operand, but reload can make
3409 it from a register when eliminating registers. */
3410 && GET_CODE (operand
) != PLUS
3411 /* A SCRATCH is not a valid operand. */
3412 && GET_CODE (operand
) != SCRATCH
3413 && (! CONSTANT_P (operand
)
3415 || LEGITIMATE_PIC_OPERAND_P (operand
))
3416 && (GENERAL_REGS
== ALL_REGS
3418 || (REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3419 && reg_renumber
[REGNO (operand
)] < 0)))
3425 cn
= lookup_constraint (p
);
3426 switch (get_constraint_type (cn
))
3429 cl
= reg_class_for_constraint (cn
);
3435 if (CONST_INT_P (operand
)
3436 && (insn_const_int_ok_for_constraint
3437 (INTVAL (operand
), cn
)))
3444 if (constraint_satisfied_p (operand
, cn
))
3446 /* If the address was already reloaded,
3448 else if (MEM_P (operand
) && address_reloaded
[i
] == 1)
3450 /* Likewise if the address will be reloaded because
3451 reg_equiv_address is nonzero. For reg_equiv_mem
3452 we have to check. */
3453 else if (REG_P (operand
)
3454 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3455 && reg_renumber
[REGNO (operand
)] < 0
3456 && ((reg_equiv_mem (REGNO (operand
)) != 0
3457 && (constraint_satisfied_p
3458 (reg_equiv_mem (REGNO (operand
)),
3460 || (reg_equiv_address (REGNO (operand
))
3464 /* If we didn't already win, we can reload
3465 constants via force_const_mem, and other
3466 MEMs by reloading the address like for 'o'. */
3467 if (CONST_POOL_OK_P (operand_mode
[i
], operand
)
3474 case CT_SPECIAL_MEMORY
:
3477 if (constraint_satisfied_p (operand
, cn
))
3479 /* Likewise if the address will be reloaded because
3480 reg_equiv_address is nonzero. For reg_equiv_mem
3481 we have to check. */
3482 else if (REG_P (operand
)
3483 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3484 && reg_renumber
[REGNO (operand
)] < 0
3485 && reg_equiv_mem (REGNO (operand
)) != 0
3486 && (constraint_satisfied_p
3487 (reg_equiv_mem (REGNO (operand
)), cn
)))
3492 if (constraint_satisfied_p (operand
, cn
))
3495 /* If we didn't already win, we can reload
3496 the address into a base register. */
3498 = base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
3504 if (constraint_satisfied_p (operand
, cn
))
3512 = reg_class_subunion
[this_alternative
[i
]][cl
];
3513 if (GET_MODE (operand
) == BLKmode
)
3517 && reg_fits_class_p (operand
, this_alternative
[i
],
3518 offset
, GET_MODE (recog_data
.operand
[i
])))
3522 while ((p
+= len
), c
);
3524 if (swapped
== (commutative
>= 0 ? 1 : 0))
3527 /* If this operand could be handled with a reg,
3528 and some reg is allowed, then this operand can be handled. */
3529 if (winreg
&& this_alternative
[i
] != NO_REGS
3530 && (win
|| !class_only_fixed_regs
[this_alternative
[i
]]))
3533      /* Record which operands fit this alternative.  */
3534      this_alternative_earlyclobber[i] = earlyclobber;
3535      if (win && ! force_reload)
3536        this_alternative_win[i] = 1;
3537      else if (did_match && ! force_reload)
3538        this_alternative_match_win[i] = 1;
3541          int const_to_mem = 0;
3543          this_alternative_offmemok[i] = offmemok;
3547          /* Alternative loses if it has no regs for a reg operand.  */
3549              && this_alternative[i] == NO_REGS
3550              && this_alternative_matches[i] < 0)
3553          /* If this is a constant that is reloaded into the desired
3554             class by copying it to memory first, count that as another
3555             reload.  This is consistent with other code and is
3556             required to avoid choosing another alternative when
3557             the constant is moved into memory by this function on
3558             an early reload pass.  Note that the test here is
3559             precisely the same as in the code below that calls
3561          if (CONST_POOL_OK_P (operand_mode[i], operand)
3562              && ((targetm.preferred_reload_class (operand,
3563                                                   this_alternative[i])
3565                  || no_input_reloads))
3568              if (this_alternative[i] != NO_REGS)
3572          /* Alternative loses if it requires a type of reload not
3573             permitted for this insn.  We can always reload SCRATCH
3574             and objects with a REG_UNUSED note.  */
3575          if (GET_CODE (operand) != SCRATCH
3576              && modified[i] != RELOAD_READ && no_output_reloads
3577              && ! find_reg_note (insn, REG_UNUSED, operand))
3579          else if (modified[i] != RELOAD_WRITE && no_input_reloads
3583          /* If we can't reload this value at all, reject this
3584             alternative.  Note that we could also lose due to
3585             LIMIT_RELOAD_CLASS, but we don't check that
3588          if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3590              if (targetm.preferred_reload_class (operand,
3591                                                  this_alternative[i])
3595              if (operand_type[i] == RELOAD_FOR_OUTPUT
3596                  && (targetm.preferred_output_reload_class (operand,
3597                                                             this_alternative[i])
3602          /* We prefer to reload pseudos over reloading other things,
3603             since such reloads may be able to be eliminated later.
3604             If we are reloading a SCRATCH, we won't be generating any
3605             insns, just using a register, so it is also preferred.
3606             So bump REJECT in other cases.  Don't do this in the
3607             case where we are forcing a constant into memory and
3608             it will then win since we don't want to have a different
3609             alternative match then.  */
3610          if (! (REG_P (operand)
3611                 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3612              && GET_CODE (operand) != SCRATCH
3613              && ! (const_to_mem && constmemok))
3616          /* Input reloads can be inherited more often than output
3617             reloads can be removed, so penalize output reloads.  */
3618          if (operand_type[i] != RELOAD_FOR_INPUT
3619              && GET_CODE (operand) != SCRATCH)
3623          /* If this operand is a pseudo register that didn't get
3624             a hard reg and this alternative accepts some
3625             register, see if the class that we want is a subset
3626             of the preferred class for this register.  If not,
3627             but it intersects that class, use the preferred class
3628             instead.  If it does not intersect the preferred
3629             class, show that usage of this alternative should be
3630             discouraged; it will be discouraged more still if the
3631             register is `preferred or nothing'.  We do this
3632             because it increases the chance of reusing our spill
3633             register in a later insn and avoiding a pair of
3634             memory stores and loads.
3636             Don't bother with this if this alternative will
3637             accept this operand.
3639             Don't do this for a multiword operand, since it is
3640             only a small win and has the risk of requiring more
3641             spill registers, which could cause a large loss.
3643             Don't do this if the preferred class has only one
3644             register because we might otherwise exhaust the
3647          if (! win && ! did_match
3648              && this_alternative[i] != NO_REGS
3649              && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3650              && reg_class_size[(int) preferred_class[i]] > 0
3651              && ! small_register_class_p (preferred_class[i]))
3653              if (! reg_class_subset_p (this_alternative[i],
3654                                        preferred_class[i]))
3656                  /* Since we don't have a way of forming the intersection,
3657                     we just do something special if the preferred class
3658                     is a subset of the class we have; that's the most
3659                     common case anyway.  */
3660                  if (reg_class_subset_p (preferred_class[i],
3661                                          this_alternative[i]))
3662                    this_alternative[i] = preferred_class[i];
3664                    reject += (2 + 2 * pref_or_nothing[i]);
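                    /* That is, REJECT goes up by 2 when this alternative's
                       class fails to be a subset of the preferred class, and
                       by 4 when the register is "preferred or nothing".  */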
3669      /* Now see if any output operands that are marked "earlyclobber"
3670         in this alternative conflict with any input operands
3671         or any memory addresses.  */
3673      for (i = 0; i < noperands; i++)
3674        if (this_alternative_earlyclobber[i]
3675            && (this_alternative_win[i] || this_alternative_match_win[i]))
3677            struct decomposition early_data;
3679            early_data = decompose (recog_data.operand[i]);
3681            gcc_assert (modified[i] != RELOAD_READ);
3683            if (this_alternative[i] == NO_REGS)
3685                this_alternative_earlyclobber[i] = 0;
3686                gcc_assert (this_insn_is_asm);
3687                error_for_asm (this_insn,
3688                               "%<&%> constraint used with no register class");
3691            for (j = 0; j < noperands; j++)
3692              /* Is this an input operand or a memory ref?  */
3693              if ((MEM_P (recog_data.operand[j])
3694                   || modified[j] != RELOAD_WRITE)
3696                  /* Ignore things like match_operator operands.  */
3697                  && !recog_data.is_operator[j]
3698                  /* Don't count an input operand that is constrained to match
3699                     the early clobber operand.  */
3700                  && ! (this_alternative_matches[j] == i
3701                        && rtx_equal_p (recog_data.operand[i],
3702                                        recog_data.operand[j]))
3703                  /* Is it altered by storing the earlyclobber operand?  */
3704                  && !immune_p (recog_data.operand[j], recog_data.operand[i],
3707                  /* If the output is in a non-empty few-regs class,
3708                     it's costly to reload it, so reload the input instead.  */
3709                  if (small_register_class_p (this_alternative[i])
3710                      && (REG_P (recog_data.operand[j])
3711                          || GET_CODE (recog_data.operand[j]) == SUBREG))
3714                      this_alternative_win[j] = 0;
3715                      this_alternative_match_win[j] = 0;
3720            /* If an earlyclobber operand conflicts with something,
3721               it must be reloaded, so request this and count the cost.  */
3725                this_alternative_win[i] = 0;
3726                this_alternative_match_win[j] = 0;
3727                for (j = 0; j < noperands; j++)
3728                  if (this_alternative_matches[j] == i
3729                      && this_alternative_match_win[j])
3731                      this_alternative_win[j] = 0;
3732                      this_alternative_match_win[j] = 0;
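              /* So, when the earlyclobbered output's class has only a few
                 registers, the conflicting input operand is the one forced
                 to be reloaded; otherwise the output itself (and anything
                 constrained to match it) loses its "win".  */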
3738      /* If one alternative accepts all the operands, no reload required,
3739         choose that alternative; don't consider the remaining ones.  */
3742          /* Unswap these so that they are never swapped at `finish'.  */
3745              recog_data.operand[commutative] = substed_operand[commutative];
3746              recog_data.operand[commutative + 1]
3747                = substed_operand[commutative + 1];
3749          for (i = 0; i < noperands; i++)
3751              goal_alternative_win[i] = this_alternative_win[i];
3752              goal_alternative_match_win[i] = this_alternative_match_win[i];
3753              goal_alternative[i] = this_alternative[i];
3754              goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3755              goal_alternative_matches[i] = this_alternative_matches[i];
3756              goal_alternative_earlyclobber[i]
3757                = this_alternative_earlyclobber[i];
3759          goal_alternative_number = this_alternative_number;
3760          goal_alternative_swapped = swapped;
3761          goal_earlyclobber = this_earlyclobber;
3765      /* REJECT, set by the ! and ? constraint characters and when a register
3766         would be reloaded into a non-preferred class, discourages the use of
3767         this alternative for a reload goal.  REJECT is incremented by six
3768         for each ? and two for each non-preferred class.  */
3769      losers = losers * 6 + reject;
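      /* For instance, an alternative where two operands need reloading
         (LOSERS == 2) and whose constraints contain a single `?'
         (REJECT == 6) scores 2 * 6 + 6 == 18 here, assuming no other
         penalties were added to REJECT.  */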
3771      /* If this alternative can be made to work by reloading,
3772         and it needs less reloading than the others checked so far,
3773         record it as the chosen goal for reloading.  */
3778          for (i = 0; i < noperands; i++)
3780              goal_alternative[i] = this_alternative[i];
3781              goal_alternative_win[i] = this_alternative_win[i];
3782              goal_alternative_match_win[i]
3783                = this_alternative_match_win[i];
3784              goal_alternative_offmemok[i]
3785                = this_alternative_offmemok[i];
3786              goal_alternative_matches[i] = this_alternative_matches[i];
3787              goal_alternative_earlyclobber[i]
3788                = this_alternative_earlyclobber[i];
3790          goal_alternative_swapped = swapped;
3792          goal_alternative_number = this_alternative_number;
3793          goal_earlyclobber = this_earlyclobber;
3799      /* If the commutative operands have been swapped, swap
3800         them back in order to check the next alternative.  */
3801      recog_data.operand[commutative] = substed_operand[commutative];
3802      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3803      /* Unswap the duplicates too.  */
3804      for (i = 0; i < recog_data.n_dups; i++)
3805        if (recog_data.dup_num[i] == commutative
3806            || recog_data.dup_num[i] == commutative + 1)
3807          *recog_data.dup_loc[i]
3808            = recog_data.operand[(int) recog_data.dup_num[i]];
3810      /* Unswap the operand related information as well.  */
3811      std::swap (preferred_class[commutative],
3812                 preferred_class[commutative + 1]);
3813      std::swap (pref_or_nothing[commutative],
3814                 pref_or_nothing[commutative + 1]);
3815      std::swap (address_reloaded[commutative],
3816                 address_reloaded[commutative + 1]);
3821  /* The operands don't meet the constraints.
3822     goal_alternative describes the alternative
3823     that we could reach by reloading the fewest operands.
3824     Reload so as to fit it.  */
3826  if (best == MAX_RECOG_OPERANDS * 2 + 600)
3828      /* No alternative works with reloads??  */
3829      if (insn_code_number >= 0)
3830        fatal_insn ("unable to generate reloads for:", insn);
3831      error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3832      /* Avoid further trouble with this insn.  */
3833      PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3838  /* Jump to `finish' from above if all operands are valid already.
3839     In that case, goal_alternative_win is all 1.  */
3842  /* Right now, for any pair of operands I and J that are required to match,
3844     goal_alternative_matches[J] is I.
3845     Set up goal_alternative_matched as the inverse function:
3846     goal_alternative_matched[I] = J.  */
3848  for (i = 0; i < noperands; i++)
3849    goal_alternative_matched[i] = -1;
3851  for (i = 0; i < noperands; i++)
3852    if (! goal_alternative_win[i]
3853        && goal_alternative_matches[i] >= 0)
3854      goal_alternative_matched[goal_alternative_matches[i]] = i;
3856  for (i = 0; i < noperands; i++)
3857    goal_alternative_win[i] |= goal_alternative_match_win[i];
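  /* For example, if operand 2 carries the constraint "0", then
     goal_alternative_matches[2] == 0, and when operand 2 did not already
     win, goal_alternative_matched[0] == 2.  */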
3859  /* If the best alternative is with operands 1 and 2 swapped,
3860     consider them swapped before reporting the reloads.  Update the
3861     operand numbers of any reloads already pushed.  */
3863  if (goal_alternative_swapped)
3865      std::swap (substed_operand[commutative],
3866                 substed_operand[commutative + 1]);
3867      std::swap (recog_data.operand[commutative],
3868                 recog_data.operand[commutative + 1]);
3869      std::swap (*recog_data.operand_loc[commutative],
3870                 *recog_data.operand_loc[commutative + 1]);
3872      for (i = 0; i < recog_data.n_dups; i++)
3873        if (recog_data.dup_num[i] == commutative
3874            || recog_data.dup_num[i] == commutative + 1)
3875          *recog_data.dup_loc[i]
3876            = recog_data.operand[(int) recog_data.dup_num[i]];
3878      for (i = 0; i < n_reloads; i++)
3880          if (rld[i].opnum == commutative)
3881            rld[i].opnum = commutative + 1;
3882          else if (rld[i].opnum == commutative + 1)
3883            rld[i].opnum = commutative;
3887  for (i = 0; i < noperands; i++)
3889      operand_reloadnum[i] = -1;
3891      /* If this is an earlyclobber operand, we need to widen the scope.
3892         The reload must remain valid from the start of the insn being
3893         reloaded until after the operand is stored into its destination.
3894         We approximate this with RELOAD_OTHER even though we know that we
3895         do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3897         One special case that is worth checking is when we have an
3898         output that is earlyclobber but isn't used past the insn (typically
3899         a SCRATCH).  In this case, we only need have the reload live
3900         through the insn itself, but not for any of our input or output
3902         But we must not accidentally narrow the scope of an existing
3903         RELOAD_OTHER reload - leave these alone.
3905         In any case, anything needed to address this operand can remain
3906         however they were previously categorized.  */
3908      if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3910          = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3911             ? RELOAD_FOR_INSN : RELOAD_OTHER);
3914  /* Any constants that aren't allowed and can't be reloaded
3915     into registers are here changed into memory references.  */
3916  for (i = 0; i < noperands; i++)
3917    if (! goal_alternative_win[i])
3919        rtx op = recog_data.operand[i];
3920        rtx subreg = NULL_RTX;
3921        rtx plus = NULL_RTX;
3922        machine_mode mode = operand_mode[i];
3924        /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3925           push_reload so we have to let them pass here.  */
3926        if (GET_CODE (op) == SUBREG)
3929            op = SUBREG_REG (op);
3930            mode = GET_MODE (op);
3933        if (GET_CODE (op) == PLUS)
3939        if (CONST_POOL_OK_P (mode, op)
3940            && ((targetm.preferred_reload_class (op, goal_alternative[i])
3942                || no_input_reloads))
3944            int this_address_reloaded;
3945            rtx tem = force_const_mem (mode, op);
3947            /* If we stripped a SUBREG or a PLUS above add it back.  */
3948            if (plus != NULL_RTX)
3949              tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3951            if (subreg != NULL_RTX)
3952              tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3954            this_address_reloaded = 0;
3955            substed_operand[i] = recog_data.operand[i]
3956              = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3957                                     0, insn, &this_address_reloaded);
3959            /* If the alternative accepts constant pool refs directly
3960               there will be no reload needed at all.  */
3961            if (plus == NULL_RTX
3962                && subreg == NULL_RTX
3963                && alternative_allows_const_pool_ref (this_address_reloaded == 0
3964                                                      ? substed_operand[i]
3966                                                      recog_data.constraints[i],
3967                                                      goal_alternative_number))
3968              goal_alternative_win[i] = 1;
3972  /* Record the values of the earlyclobber operands for the caller.  */
3973  if (goal_earlyclobber)
3974    for (i = 0; i < noperands; i++)
3975      if (goal_alternative_earlyclobber[i])
3976        reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3978  /* Now record reloads for all the operands that need them.  */
3979  for (i = 0; i < noperands; i++)
3980    if (! goal_alternative_win[i])
3982        /* Operands that match previous ones have already been handled.  */
3983        if (goal_alternative_matches[i] >= 0)
3985        /* Handle an operand with a nonoffsettable address
3986           appearing where an offsettable address will do
3987           by reloading the address into a base register.
3989           ??? We can also do this when the operand is a register and
3990           reg_equiv_mem is not offsettable, but this is a bit tricky,
3991           so we don't bother with it.  It may not be worth doing.  */
3992        else if (goal_alternative_matched[i] == -1
3993                 && goal_alternative_offmemok[i]
3994                 && MEM_P (recog_data.operand[i]))
3996            /* If the address to be reloaded is a VOIDmode constant,
3997               use the default address mode as mode of the reload register,
3998               as would have been done by find_reloads_address.  */
3999            addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4000            machine_mode address_mode;
4002            address_mode = get_address_mode (recog_data.operand[i]);
4003            operand_reloadnum[i]
4004              = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4005                             &XEXP (recog_data.operand[i], 0), (rtx *) 0,
4006                             base_reg_class (VOIDmode, as, MEM, SCRATCH),
4008                             VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4009            rld[operand_reloadnum[i]].inc
4010              = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4012            /* If this operand is an output, we will have made any
4013               reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4014               now we are treating part of the operand as an input, so
4015               we must change these to RELOAD_FOR_INPUT_ADDRESS.  */
4017            if (modified[i] == RELOAD_WRITE)
4019                for (j = 0; j < n_reloads; j++)
4021                    if (rld[j].opnum == i)
4023                        if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4024                          rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4025                        else if (rld[j].when_needed
4026                                 == RELOAD_FOR_OUTADDR_ADDRESS)
4027                          rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4032        else if (goal_alternative_matched[i] == -1)
4034            operand_reloadnum[i]
4035              = push_reload ((modified[i] != RELOAD_WRITE
4036                              ? recog_data.operand[i] : 0),
4037                             (modified[i] != RELOAD_READ
4038                              ? recog_data.operand[i] : 0),
4039                             (modified[i] != RELOAD_WRITE
4040                              ? recog_data.operand_loc[i] : 0),
4041                             (modified[i] != RELOAD_READ
4042                              ? recog_data.operand_loc[i] : 0),
4043                             (enum reg_class) goal_alternative[i],
4044                             (modified[i] == RELOAD_WRITE
4045                              ? VOIDmode : operand_mode[i]),
4046                             (modified[i] == RELOAD_READ
4047                              ? VOIDmode : operand_mode[i]),
4048                             (insn_code_number < 0 ? 0
4049                              : insn_data[insn_code_number].operand[i].strict_low),
4050                             0, i, operand_type[i]);
4052        /* In a matching pair of operands, one must be input only
4053           and the other must be output only.
4054           Pass the input operand as IN and the other as OUT.  */
4055        else if (modified[i] == RELOAD_READ
4056                 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4058            operand_reloadnum[i]
4059              = push_reload (recog_data.operand[i],
4060                             recog_data.operand[goal_alternative_matched[i]],
4061                             recog_data.operand_loc[i],
4062                             recog_data.operand_loc[goal_alternative_matched[i]],
4063                             (enum reg_class) goal_alternative[i],
4065                             operand_mode[goal_alternative_matched[i]],
4066                             0, 0, i, RELOAD_OTHER);
4067            operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4069        else if (modified[i] == RELOAD_WRITE
4070                 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4072            operand_reloadnum[goal_alternative_matched[i]]
4073              = push_reload (recog_data.operand[goal_alternative_matched[i]],
4074                             recog_data.operand[i],
4075                             recog_data.operand_loc[goal_alternative_matched[i]],
4076                             recog_data.operand_loc[i],
4077                             (enum reg_class) goal_alternative[i],
4078                             operand_mode[goal_alternative_matched[i]],
4080                             0, 0, i, RELOAD_OTHER);
4081            operand_reloadnum[i] = output_reloadnum;
4085            gcc_assert (insn_code_number < 0);
4086            error_for_asm (insn, "inconsistent operand constraints "
4088            /* Avoid further trouble with this insn.  */
4089            PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4094        else if (goal_alternative_matched[i] < 0
4095                 && goal_alternative_matches[i] < 0
4096                 && address_operand_reloaded[i] != 1
4099            /* For each non-matching operand that's a MEM or a pseudo-register
4100               that didn't get a hard register, make an optional reload.
4101               This may get done even if the insn needs no reloads otherwise.  */
4103            rtx operand = recog_data.operand[i];
4105            while (GET_CODE (operand) == SUBREG)
4106              operand = SUBREG_REG (operand);
4107            if ((MEM_P (operand)
4109                     && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4110                /* If this is only for an output, the optional reload would not
4111                   actually cause us to use a register now, just note that
4112                   something is stored here.  */
4113                && (goal_alternative[i] != NO_REGS
4114                    || modified[i] == RELOAD_WRITE)
4115                && ! no_input_reloads
4116                /* An optional output reload might allow to delete INSN later.
4117                   We mustn't make in-out reloads on insns that are not permitted
4119                   If this is an asm, we can't delete it; we must not even call
4120                   push_reload for an optional output reload in this case,
4121                   because we can't be sure that the constraint allows a register,
4122                   and push_reload verifies the constraints for asms.  */
4123                && (modified[i] == RELOAD_READ
4124                    || (! no_output_reloads && ! this_insn_is_asm)))
4125              operand_reloadnum[i]
4126                = push_reload ((modified[i] != RELOAD_WRITE
4127                                ? recog_data.operand[i] : 0),
4128                               (modified[i] != RELOAD_READ
4129                                ? recog_data.operand[i] : 0),
4130                               (modified[i] != RELOAD_WRITE
4131                                ? recog_data.operand_loc[i] : 0),
4132                               (modified[i] != RELOAD_READ
4133                                ? recog_data.operand_loc[i] : 0),
4134                               (enum reg_class) goal_alternative[i],
4135                               (modified[i] == RELOAD_WRITE
4136                                ? VOIDmode : operand_mode[i]),
4137                               (modified[i] == RELOAD_READ
4138                                ? VOIDmode : operand_mode[i]),
4139                               (insn_code_number < 0 ? 0
4140                                : insn_data[insn_code_number].operand[i].strict_low),
4141                               1, i, operand_type[i]);
4142            /* If a memory reference remains (either as a MEM or a pseudo that
4143               did not get a hard register), yet we can't make an optional
4144               reload, check if this is actually a pseudo register reference;
4145               we then need to emit a USE and/or a CLOBBER so that reload
4146               inheritance will do the right thing.  */
4150                         && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4151                         && reg_renumber[REGNO (operand)] < 0)))
4153                operand = *recog_data.operand_loc[i];
4155                while (GET_CODE (operand) == SUBREG)
4156                  operand = SUBREG_REG (operand);
4157                if (REG_P (operand))
4159                    if (modified[i] != RELOAD_WRITE)
4160                      /* We mark the USE with QImode so that we recognize
4161                         it as one that can be safely deleted at the end
4163                      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4165                    if (modified[i] != RELOAD_READ)
4166                      emit_insn_after (gen_clobber (operand), insn);
4170        else if (goal_alternative_matches[i] >= 0
4171                 && goal_alternative_win[goal_alternative_matches[i]]
4172                 && modified[i] == RELOAD_READ
4173                 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4174                 && ! no_input_reloads && ! no_output_reloads
4177            /* Similarly, make an optional reload for a pair of matching
4178               objects that are in MEM or a pseudo that didn't get a hard reg.  */
4180            rtx operand = recog_data.operand[i];
4182            while (GET_CODE (operand) == SUBREG)
4183              operand = SUBREG_REG (operand);
4184            if ((MEM_P (operand)
4186                     && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4187                && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4188              operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4189                = push_reload (recog_data.operand[goal_alternative_matches[i]],
4190                               recog_data.operand[i],
4191                               recog_data.operand_loc[goal_alternative_matches[i]],
4192                               recog_data.operand_loc[i],
4193                               (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4194                               operand_mode[goal_alternative_matches[i]],
4196                               0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4199  /* Perform whatever substitutions on the operands we are supposed
4200     to make due to commutativity or replacement of registers
4201     with equivalent constants or memory slots.  */
4203  for (i = 0; i < noperands; i++)
4205      /* We only do this on the last pass through reload, because it is
4206         possible for some data (like reg_equiv_address) to be changed during
4207         later passes.  Moreover, we lose the opportunity to get a useful
4208         reload_{in,out}_reg when we do these replacements.  */
4212          rtx substitution = substed_operand[i];
4214          *recog_data.operand_loc[i] = substitution;
4216          /* If we're replacing an operand with a LABEL_REF, we need to
4217             make sure that there's a REG_LABEL_OPERAND note attached to
4218             this instruction.  */
4219          if (GET_CODE (substitution) == LABEL_REF
4220              && !find_reg_note (insn, REG_LABEL_OPERAND,
4221                                 LABEL_REF_LABEL (substitution))
4222              /* For a JUMP_P, if it was a branch target it must have
4223                 already been recorded as such.  */
4225                  || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4228              add_reg_note (insn, REG_LABEL_OPERAND,
4229                            LABEL_REF_LABEL (substitution));
4230              if (LABEL_P (LABEL_REF_LABEL (substitution)))
4231                ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4236        retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4239  /* If this insn pattern contains any MATCH_DUP's, make sure that
4240     they will be substituted if the operands they match are substituted.
4241     Also do now any substitutions we already did on the operands.
4243     Don't do this if we aren't making replacements because we might be
4244     propagating things allocated by frame pointer elimination into places
4245     it doesn't expect.  */
4247  if (insn_code_number >= 0 && replace)
4248    for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4250        int opno = recog_data.dup_num[i];
4251        *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4252        dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4256  /* This loses because reloading of prior insns can invalidate the equivalence
4257     (or at least find_equiv_reg isn't smart enough to find it any more),
4258     causing this insn to need more reload regs than it needed before.
4259     It may be too late to make the reload regs available.
4260     Now this optimization is done safely in choose_reload_regs.  */
4262  /* For each reload of a reg into some other class of reg,
4263     search for an existing equivalent reg (same value now) in the right class.
4264     We can use it as long as we don't need to change its contents.  */
4265  for (i = 0; i < n_reloads; i++)
4266    if (rld[i].reg_rtx == 0
4268        && REG_P (rld[i].in)
4272          = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4273                            static_reload_reg_p, 0, rld[i].inmode);
4274        /* Prevent generation of insn to load the value
4275           because the one we found already has the value.  */
4277          rld[i].in = rld[i].reg_rtx;
4281  /* If we detected error and replaced asm instruction by USE, forget about the
4283  if (GET_CODE (PATTERN (insn)) == USE
4284      && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4287  /* Perhaps an output reload can be combined with another
4288     to reduce needs by one.  */
4289  if (!goal_earlyclobber)
4292  /* If we have a pair of reloads for parts of an address, they are reloading
4293     the same object, the operands themselves were not reloaded, and they
4294     are for two operands that are supposed to match, merge the reloads and
4295     change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
4297  for (i = 0; i < n_reloads; i++)
4301      for (j = i + 1; j < n_reloads; j++)
4302        if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4303             || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4304             || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4305             || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4306            && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4307                || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4308                || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4309                || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4310            && rtx_equal_p (rld[i].in, rld[j].in)
4311            && (operand_reloadnum[rld[i].opnum] < 0
4312                || rld[operand_reloadnum[rld[i].opnum]].optional)
4313            && (operand_reloadnum[rld[j].opnum] < 0
4314                || rld[operand_reloadnum[rld[j].opnum]].optional)
4315            && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4316                || (goal_alternative_matches[rld[j].opnum]
4319            for (k = 0; k < n_replacements; k++)
4320              if (replacements[k].what == j)
4321                replacements[k].what = i;
4323            if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4324                || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4325              rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4327              rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4332  /* Scan all the reloads and update their type.
4333     If a reload is for the address of an operand and we didn't reload
4334     that operand, change the type.  Similarly, change the operand number
4335     of a reload when two operands match.  If a reload is optional, treat it
4336     as though the operand isn't reloaded.
4338     ??? This latter case is somewhat odd because if we do the optional
4339     reload, it means the object is hanging around.  Thus we need only
4340     do the address reload if the optional reload was NOT done.
4342     Change secondary reloads to be the address type of their operand, not
4345     If an operand's reload is now RELOAD_OTHER, change any
4346     RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4347     RELOAD_FOR_OTHER_ADDRESS.  */
4349  for (i = 0; i < n_reloads; i++)
4351      if (rld[i].secondary_p
4352          && rld[i].when_needed == operand_type[rld[i].opnum])
4353        rld[i].when_needed = address_type[rld[i].opnum];
4355      if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4356           || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4357           || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4358           || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4359          && (operand_reloadnum[rld[i].opnum] < 0
4360              || rld[operand_reloadnum[rld[i].opnum]].optional))
4362          /* If we have a secondary reload to go along with this reload,
4363             change its type to RELOAD_FOR_OPADDR_ADDR.  */
4365          if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4366               || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4367              && rld[i].secondary_in_reload != -1)
4369              int secondary_in_reload = rld[i].secondary_in_reload;
4371              rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4373              /* If there's a tertiary reload we have to change it also.  */
4374              if (secondary_in_reload > 0
4375                  && rld[secondary_in_reload].secondary_in_reload != -1)
4376                rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4377                  = RELOAD_FOR_OPADDR_ADDR;
4380          if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4381               || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4382              && rld[i].secondary_out_reload != -1)
4384              int secondary_out_reload = rld[i].secondary_out_reload;
4386              rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4388              /* If there's a tertiary reload we have to change it also.  */
4389              if (secondary_out_reload
4390                  && rld[secondary_out_reload].secondary_out_reload != -1)
4391                rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4392                  = RELOAD_FOR_OPADDR_ADDR;
4395          if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4396              || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4397            rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4399            rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4402      if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4403           || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4404          && operand_reloadnum[rld[i].opnum] >= 0
4405          && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4407        rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4409      if (goal_alternative_matches[rld[i].opnum] >= 0)
4410        rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4413  /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4414     If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4415     reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4417     choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4418     conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4419     single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4420     However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4421     then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4422     RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4423     This is complicated by the fact that a single operand can have more
4424     than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4425     choose_reload_regs without affecting code quality, and cases that
4426     actually fail are extremely rare, so it turns out to be better to fix
4427     the problem here by not generating cases that choose_reload_regs will
4429  /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4430     RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4432     We can reduce the register pressure by exploiting that a
4433     RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4434     does not conflict with any of them, if it is only used for the first of
4435     the RELOAD_FOR_X_ADDRESS reloads.  */
4437    int first_op_addr_num = -2;
4438    int first_inpaddr_num[MAX_RECOG_OPERANDS];
4439    int first_outpaddr_num[MAX_RECOG_OPERANDS];
4440    int need_change = 0;
4441    /* We use last_op_addr_reload and the contents of the above arrays
4442       first as flags - -2 means no instance encountered, -1 means exactly
4443       one instance encountered.
4444       If more than one instance has been encountered, we store the reload
4445       number of the first reload of the kind in question; reload numbers
4446       are known to be non-negative.  */
4447    for (i = 0; i < noperands; i++)
4448      first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4449    for (i = n_reloads - 1; i >= 0; i--)
4451        switch (rld[i].when_needed)
4453          case RELOAD_FOR_OPERAND_ADDRESS:
4454            if (++first_op_addr_num >= 0)
4456                first_op_addr_num = i;
4460          case RELOAD_FOR_INPUT_ADDRESS:
4461            if (++first_inpaddr_num[rld[i].opnum] >= 0)
4463                first_inpaddr_num[rld[i].opnum] = i;
4467          case RELOAD_FOR_OUTPUT_ADDRESS:
4468            if (++first_outpaddr_num[rld[i].opnum] >= 0)
4470                first_outpaddr_num[rld[i].opnum] = i;
4481    for (i = 0; i < n_reloads; i++)
4484        enum reload_type type;
4486        switch (rld[i].when_needed)
4488          case RELOAD_FOR_OPADDR_ADDR:
4489            first_num = first_op_addr_num;
4490            type = RELOAD_FOR_OPERAND_ADDRESS;
4492          case RELOAD_FOR_INPADDR_ADDRESS:
4493            first_num = first_inpaddr_num[rld[i].opnum];
4494            type = RELOAD_FOR_INPUT_ADDRESS;
4496          case RELOAD_FOR_OUTADDR_ADDRESS:
4497            first_num = first_outpaddr_num[rld[i].opnum];
4498            type = RELOAD_FOR_OUTPUT_ADDRESS;
4505        else if (i > first_num)
4506          rld[i].when_needed = type;
4509            /* Check if the only TYPE reload that uses reload I is
4510               reload FIRST_NUM.  */
4511            for (j = n_reloads - 1; j > first_num; j--)
4513                if (rld[j].when_needed == type
4514                    && (rld[i].secondary_p
4515                        ? rld[j].secondary_in_reload == i
4516                        : reg_mentioned_p (rld[i].in, rld[j].in)))
4518                    rld[i].when_needed = type;
4527  /* See if we have any reloads that are now allowed to be merged
4528     because we've changed when the reload is needed to
4529     RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4530     check for the most common cases.  */
4532  for (i = 0; i < n_reloads; i++)
4533    if (rld[i].in != 0 && rld[i].out == 0
4534        && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4535            || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4536            || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4537      for (j = 0; j < n_reloads; j++)
4538        if (i != j && rld[j].in != 0 && rld[j].out == 0
4539            && rld[j].when_needed == rld[i].when_needed
4540            && MATCHES (rld[i].in, rld[j].in)
4541            && rld[i].rclass == rld[j].rclass
4542            && !rld[i].nocombine && !rld[j].nocombine
4543            && rld[i].reg_rtx == rld[j].reg_rtx)
4545            rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4546            transfer_replacements (i, j);
4550  /* If we made any reloads for addresses, see if they violate a
4551     "no input reloads" requirement for this insn.  But loads that we
4552     do after the insn (such as for output addresses) are fine.  */
4553  if (HAVE_cc0 && no_input_reloads)
4554    for (i = 0; i < n_reloads; i++)
4555      gcc_assert (rld[i].in == 0
4556                  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4557                  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4559  /* Compute reload_mode and reload_nregs.  */
4560  for (i = 0; i < n_reloads; i++)
4563        = (rld[i].inmode == VOIDmode
4564           || (GET_MODE_SIZE (rld[i].outmode)
4565               > GET_MODE_SIZE (rld[i].inmode)))
4566          ? rld[i].outmode : rld[i].inmode;
4568      rld[i].nregs = ira_reg_class_max_nregs[rld[i].rclass][rld[i].mode];
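      /* The reload mode chosen above is the wider of the input and output
         modes, so a single reload register can hold the value on both
         sides of the insn.  */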
4571  /* Special case a simple move with an input reload and a
4572     destination of a hard reg, if the hard reg is ok, use it.  */
4573  for (i = 0; i < n_reloads; i++)
4574    if (rld[i].when_needed == RELOAD_FOR_INPUT
4575        && GET_CODE (PATTERN (insn)) == SET
4576        && REG_P (SET_DEST (PATTERN (insn)))
4577        && (SET_SRC (PATTERN (insn)) == rld[i].in
4578            || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4579        && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4581        rtx dest = SET_DEST (PATTERN (insn));
4582        unsigned int regno = REGNO (dest);
4584        if (regno < FIRST_PSEUDO_REGISTER
4585            && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4586            && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4588            int nr = hard_regno_nregs[regno][rld[i].mode];
4591            for (nri = 1; nri < nr; nri++)
4592              if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4599              rld[i].reg_rtx = dest;
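  /* For instance, for (set (reg:SI 3) (reg:SI 800)) where pseudo 800 needs
     an input reload, hard register 3 is in the reload's class and is valid
     in the reload mode, register 3 itself becomes the reload register.  */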
4606  /* Return true if alternative number ALTNUM in constraint-string
4607     CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4608     MEM gives the reference if it didn't need any reloads, otherwise it
4612  alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4613                                     const char *constraint, int altnum)
4617    /* Skip alternatives before the one requested.  */
4620        while (*constraint++ != ',')
4624    /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4625       If one of them is present, this alternative accepts the result of
4626       passing a constant-pool reference through find_reloads_toplev.
4628       The same is true of extra memory constraints if the address
4629       was reloaded into a register.  However, the target may elect
4630       to disallow the original constant address, forcing it to be
4631       reloaded into a register instead.  */
4632    for (; (c = *constraint) && c != ',' && c != '#';
4633         constraint += CONSTRAINT_LEN (c, constraint))
4635        enum constraint_num cn = lookup_constraint (constraint);
4636        if (insn_extra_memory_constraint (cn)
4637            && (mem == NULL || constraint_satisfied_p (mem, cn)))
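    /* For example, with CONSTRAINT == "r,m" and ALTNUM == 1, the scan
       starts after the first comma, and the `m' makes the alternative
       accept the reloaded constant-pool reference (provided MEM is null
       or satisfies the constraint).  */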
4643  /* Scan X for memory references and scan the addresses for reloading.
4644     Also checks for references to "constant" regs that we want to eliminate
4645     and replaces them with the values they stand for.
4646     We may alter X destructively if it contains a reference to such.
4647     If X is just a constant reg, we return the equivalent value
4650     IND_LEVELS says how many levels of indirect addressing this machine
4653     OPNUM and TYPE identify the purpose of the reload.
4655     IS_SET_DEST is true if X is the destination of a SET, which is not
4656     appropriate to be replaced by a constant.
4658     INSN, if nonzero, is the insn in which we do the reload.  It is used
4659     to determine if we may generate output reloads, and where to put USEs
4660     for pseudos that we have to replace with stack slots.
4662     ADDRESS_RELOADED.  If nonzero, is a pointer to where we put the
4663     result of find_reloads_address.  */
4666  find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4667                       int ind_levels, int is_set_dest, rtx_insn *insn,
4668                       int *address_reloaded)
4670    RTX_CODE code = GET_CODE (x);
4672    const char *fmt = GET_RTX_FORMAT (code);
4678        /* This code is duplicated for speed in find_reloads.  */
4679        int regno = REGNO (x);
4680        if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4681          x = reg_equiv_constant (regno);
4683        /* This creates (subreg (mem...)) which would cause an unnecessary
4684           reload of the mem.  */
4685        else if (reg_equiv_mem (regno) != 0)
4686          x = reg_equiv_mem (regno);
4688        else if (reg_equiv_memory_loc (regno)
4689                 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4691            rtx mem = make_memloc (x, regno);
4692            if (reg_equiv_address (regno)
4693                || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4695                /* If this is not a toplevel operand, find_reloads doesn't see
4696                   this substitution.  We have to emit a USE of the pseudo so
4697                   that delete_output_reload can see it.  */
4698                if (replace_reloads && recog_data.operand[opnum] != x)
4699                  /* We mark the USE with QImode so that we recognize it
4700                     as one that can be safely deleted at the end of
4702                  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4705                i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4706                                          opnum, type, ind_levels, insn);
4707                if (!rtx_equal_p (x, mem))
4708                  push_reg_equiv_alt_mem (regno, x);
4709                if (address_reloaded)
4710                  *address_reloaded = i;
4719        i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4720                                  opnum, type, ind_levels, insn);
4721        if (address_reloaded)
4722          *address_reloaded = i;
4727    if (code == SUBREG && REG_P (SUBREG_REG (x)))
4729        /* Check for SUBREG containing a REG that's equivalent to a
4730           constant.  If the constant has a known value, truncate it
4731           right now.  Similarly if we are extracting a single-word of a
4732           multi-word constant.  If the constant is symbolic, allow it
4733           to be substituted normally.  push_reload will strip the
4734           subreg later.  The constant must not be VOIDmode, because we
4735           will lose the mode of the register (this should never happen
4736           because one of the cases above should handle it).  */
4738        int regno = REGNO (SUBREG_REG (x));
4741        if (regno >= FIRST_PSEUDO_REGISTER
4742            && reg_renumber[regno] < 0
4743            && reg_equiv_constant (regno) != 0)
4746              simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4747                                   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4749            if (CONSTANT_P (tem)
4750                && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4752                tem = force_const_mem (GET_MODE (x), tem);
4753                i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4754                                          &XEXP (tem, 0), opnum, type,
4756                if (address_reloaded)
4757                  *address_reloaded = i;
4762        /* If the subreg contains a reg that will be converted to a mem,
4763           attempt to convert the whole subreg to a (narrower or wider)
4764           memory reference instead.  If this succeeds, we're done --
4765           otherwise fall through to check whether the inner reg still
4766           needs address reloads anyway.  */
4768        if (regno >= FIRST_PSEUDO_REGISTER
4769            && reg_equiv_memory_loc (regno) != 0)
4771            tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4772                                               insn, address_reloaded);
4778    for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4782          rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4783                                              ind_levels, is_set_dest, insn,
4785          /* If we have replaced a reg with its equivalent memory loc -
4786             that can still be handled here e.g. if it's in a paradoxical
4787             subreg - we must make the change in a copy, rather than using
4788             a destructive change.  This way, find_reloads can still elect
4789             not to do the change.  */
4790          if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4792              x = shallow_copy_rtx (x);
4795          XEXP (x, i) = new_part;
4801  /* Return a mem ref for the memory equivalent of reg REGNO.
4802     This mem ref is not shared with anything.  */
4805  make_memloc (rtx ad, int regno)
4807    /* We must rerun eliminate_regs, in case the elimination
4808       offsets have changed.  */
4810      = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4813    /* If TEM might contain a pseudo, we must copy it to avoid
4814       modifying it when we do the substitution for the reload.  */
4815    if (rtx_varies_p (tem, 0))
4816      tem = copy_rtx (tem);
4818    tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4819    tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4821    /* Copy the result if it's still the same as the equivalence, to avoid
4822       modifying it when we do the substitution for the reload.  */
4823    if (tem == reg_equiv_memory_loc (regno))
4824      tem = copy_rtx (tem);
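  /* For example, if reg_equiv_memory_loc for REGNO is
     (mem:SI (plus (reg fp) (const_int -16))), the result is a fresh
     (mem (plus ...)) in AD's mode whose address has already had any
     eliminable registers replaced by their current offsets.  */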
4828  /* Returns true if AD could be turned into a valid memory reference
4829     to mode MODE in address space AS by reloading the part pointed to
4830     by PART into a register.  */
4833  maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4834                                     addr_space_t as, rtx *part)
4838    rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4841    retv = memory_address_addr_space_p (mode, ad, as);
4847  /* Record all reloads needed for handling memory address AD
4848     which appears in *LOC in a memory reference to mode MODE
4849     which itself is found in location *MEMREFLOC.
4850     Note that we take shortcuts assuming that no multi-reg machine mode
4851     occurs as part of an address.
4853     OPNUM and TYPE specify the purpose of this reload.
4855     IND_LEVELS says how many levels of indirect addressing this machine
4858     INSN, if nonzero, is the insn in which we do the reload.  It is used
4859     to determine if we may generate output reloads, and where to put USEs
4860     for pseudos that we have to replace with stack slots.
4862     Value is one if this address is reloaded or replaced as a whole; it is
4863     zero if the top level of this address was not reloaded or replaced, and
4864     it is -1 if it may or may not have been reloaded or replaced.
4866     Note that there is no verification that the address will be valid after
4867     this routine does its work.  Instead, we rely on the fact that the address
4868     was valid when reload started.  So we need only undo things that reload
4869     could have broken.  These are wrong register types, pseudos not allocated
4870     to a hard register, and frame pointer elimination.  */
4873  find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4874                        rtx *loc, int opnum, enum reload_type type,
4875                        int ind_levels, rtx_insn *insn)
4877    addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4878                                : ADDR_SPACE_GENERIC;
4880    int removed_and = 0;
4884    /* If the address is a register, see if it is a legitimate address and
4885       reload if not.  We first handle the cases where we need not reload
4886       or where we must reload in a non-standard way.  */
4892        if (reg_equiv_constant (regno) != 0)
4894            find_reloads_address_part (reg_equiv_constant (regno), loc,
4895                                       base_reg_class (mode, as, MEM, SCRATCH),
4896                                       GET_MODE (ad), opnum, type, ind_levels);
4900        tem = reg_equiv_memory_loc (regno);
4903            if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4905                tem = make_memloc (ad, regno);
4906                if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4908                                                          MEM_ADDR_SPACE (tem)))
4912                    find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4913                                          &XEXP (tem, 0), opnum,
4914                                          ADDR_TYPE (type), ind_levels, insn);
4915                    if (!rtx_equal_p (tem, orig))
4916                      push_reg_equiv_alt_mem (regno, tem);
4918                /* We can avoid a reload if the register's equivalent memory
4919                   expression is valid as an indirect memory address.
4920                   But not all addresses are valid in a mem used as an indirect
4921                   address: only reg or reg+constant.  */
4924                    && strict_memory_address_addr_space_p (mode, tem, as)
4925                    && (REG_P (XEXP (tem, 0))
4926                        || (GET_CODE (XEXP (tem, 0)) == PLUS
4927                            && REG_P (XEXP (XEXP (tem, 0), 0))
4928                            && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4930                    /* TEM is not the same as what we'll be replacing the
4931                       pseudo with after reload, put a USE in front of INSN
4932                       in the final reload pass.  */
4934                        && num_not_at_initial_offset
4935                        && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4938                        /* We mark the USE with QImode so that we
4939                           recognize it as one that can be safely
4940                           deleted at the end of reload.  */
4941                        PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4944                    /* This doesn't really count as replacing the address
4945                       as a whole, since it is still a memory access.  */
4953        /* The only remaining case where we can avoid a reload is if this is a
4954           hard register that is valid as a base register and which is not the
4955           subject of a CLOBBER in this insn.  */
4957        else if (regno < FIRST_PSEUDO_REGISTER
4958                 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4959                 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4962        /* If we do not have one of the cases above, we must do the reload.  */
4963        push_reload (ad, NULL_RTX, loc, (rtx *) 0,
4964                     base_reg_class (mode, as, MEM, SCRATCH),
4965                     GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4969    if (strict_memory_address_addr_space_p (mode, ad, as))
4971        /* The address appears valid, so reloads are not needed.
4972           But the address may contain an eliminable register.
4973           This can happen because a machine with indirect addressing
4974           may consider a pseudo register by itself a valid address even when
4975           it has failed to get a hard reg.
4976           So do a tree-walk to find and eliminate all such regs.  */
4978        /* But first quickly dispose of a common case.  */
4979        if (GET_CODE (ad) == PLUS
4980            && CONST_INT_P (XEXP (ad, 1))
4981            && REG_P (XEXP (ad, 0))
4982            && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4985        subst_reg_equivs_changed = 0;
4986        *loc = subst_reg_equivs (ad, insn);
4988        if (! subst_reg_equivs_changed)
4991        /* Check result for validity after substitution.  */
4992        if (strict_memory_address_addr_space_p (mode, ad, as))
4996  #ifdef LEGITIMIZE_RELOAD_ADDRESS
4999    if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5001        LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5006            *memrefloc = copy_rtx (*memrefloc);
5007            XEXP (*memrefloc, 0) = ad;
5008            move_replacements (&ad, &XEXP (*memrefloc, 0));
5014    /* The address is not valid.  We have to figure out why.  First see if
5015       we have an outer AND and remove it if so.  Then analyze what's inside.  */
5017    if (GET_CODE (ad) == AND)
5020        loc = &XEXP (ad, 0);
5024    /* One possibility for why the address is invalid is that it is itself
5025       a MEM.  This can happen when the frame pointer is being eliminated, a
5026       pseudo is not allocated to a hard register, and the offset between the
5027       frame and stack pointers is not its initial value.  In that case the
5028       pseudo will have been replaced by a MEM referring to the
5032        /* First ensure that the address in this MEM is valid.  Then, unless
5033           indirect addresses are valid, reload the MEM into a register.  */
5035        find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5036                              opnum, ADDR_TYPE (type),
5037                              ind_levels == 0 ? 0 : ind_levels - 1, insn);
5039        /* If tem was changed, then we must create a new memory reference to
5040           hold it and store it back into memrefloc.  */
5041        if (tem != ad && memrefloc)
5043            *memrefloc = copy_rtx (*memrefloc);
5044            copy_replacements (tem, XEXP (*memrefloc, 0));
5045            loc = &XEXP (*memrefloc, 0);
5047        loc = &XEXP (*loc, 0);
5050        /* Check similar cases as for indirect addresses as above except
5051           that we can allow pseudos and a MEM since they should have been
5052           taken care of above.  */
5055            || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5056            || MEM_P (XEXP (tem, 0))
5057            || ! (REG_P (XEXP (tem, 0))
5058                  || (GET_CODE (XEXP (tem, 0)) == PLUS
5059                      && REG_P (XEXP (XEXP (tem, 0), 0))
5060                      && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5062            /* Must use TEM here, not AD, since it is the one that will
5063               have any subexpressions reloaded, if needed.  */
5064            push_reload (tem, NULL_RTX, loc, (rtx *) 0,
5065                         base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5068        return ! removed_and;
5074    /* If we have address of a stack slot but it's not valid because the
5075       displacement is too large, compute the sum in a register.
5076       Handle all base registers here, not just fp/ap/sp, because on some
5077       targets (namely SH) we can also get too large displacements from
5078       big-endian corrections.  */
5079    else if (GET_CODE (ad) == PLUS
5080             && REG_P (XEXP (ad, 0))
5081             && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5082             && CONST_INT_P (XEXP (ad, 1))
5083             && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5085                 /* Similarly, if we were to reload the base register and the
5086                    mem+offset address is still invalid, then we want to reload
5087                    the whole address, not just the base register.  */
5088                 || ! maybe_memory_address_addr_space_p
5089                      (mode, ad, as, &(XEXP (ad, 0)))))
5092        /* Unshare the MEM rtx so we can safely alter it.  */
5095            *memrefloc = copy_rtx (*memrefloc);
5096            loc = &XEXP (*memrefloc, 0);
5098            loc = &XEXP (*loc, 0);
5101        if (double_reg_address_ok
5102            && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5105            /* Unshare the sum as well.  */
5106            *loc = ad = copy_rtx (ad);
5108            /* Reload the displacement into an index reg.
5109               We assume the frame pointer or arg pointer is a base reg.  */
5110            find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5111                                       INDEX_REG_CLASS, GET_MODE (ad), opnum,
5117            /* If the sum of two regs is not necessarily valid,
5118               reload the sum into a base reg.
5119               That will at least work.  */
5120            find_reloads_address_part (ad, loc,
5121                                       base_reg_class (mode, as, MEM, SCRATCH),
5122                                       GET_MODE (ad), opnum, type, ind_levels);
5124        return ! removed_and;
5127    /* If we have an indexed stack slot, there are three possible reasons why
5128       it might be invalid: The index might need to be reloaded, the address
5129       might have been made by frame pointer elimination and hence have a
5130       constant out of range, or both reasons might apply.
5132       We can easily check for an index needing reload, but even if that is the
5133       case, we might also have an invalid constant.  To avoid making the
5134       conservative assumption and requiring two reloads, we see if this address
5135       is valid when not interpreted strictly.  If it is, the only problem is
5136       that the index needs a reload and find_reloads_address_1 will take care
5139       Handle all base registers here, not just fp/ap/sp, because on some
5140       targets (namely SPARC) we can also get invalid addresses from preventive
5141       subreg big-endian corrections made by find_reloads_toplev.  We
5142       can also get expressions involving LO_SUM (rather than PLUS) from
5143       find_reloads_subreg_address.
5145       If we decide to do something, it must be that `double_reg_address_ok'
5146       is true.  We generate a reload of the base register + constant and
5147       rework the sum so that the reload register will be added to the index.
5148       This is safe because we know the address isn't shared.
5150       We check for the base register as both the first and second operand of
5151       the innermost PLUS and/or LO_SUM.  */
5153    for (op_index = 0; op_index < 2; ++op_index)
5155        rtx operand, addend;
5156        enum rtx_code inner_code;
5158        if (GET_CODE (ad) != PLUS)
5161        inner_code = GET_CODE (XEXP (ad, 0));
5162        if (!(GET_CODE (ad) == PLUS
5163              && CONST_INT_P (XEXP (ad, 1))
5164              && (inner_code == PLUS || inner_code == LO_SUM)))
5167        operand = XEXP (XEXP (ad, 0), op_index);
5168        if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5171        addend = XEXP (XEXP (ad, 0), 1 - op_index);
5173        if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5175             || operand == frame_pointer_rtx
5176             || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5177                 && operand == hard_frame_pointer_rtx)
5178             || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5179                 && operand == arg_pointer_rtx)
5180             || operand == stack_pointer_rtx)
5181            && ! maybe_memory_address_addr_space_p
5182                 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5187            offset_reg = plus_constant (GET_MODE (ad), operand,
5188                                        INTVAL (XEXP (ad, 1)));
5190            /* Form the adjusted address.  */
5191            if (GET_CODE (XEXP (ad, 0)) == PLUS)
5192              ad = gen_rtx_PLUS (GET_MODE (ad),
5193                                 op_index == 0 ? offset_reg : addend,
5194                                 op_index == 0 ? addend : offset_reg);
5196              ad = gen_rtx_LO_SUM (GET_MODE (ad),
5197                                   op_index == 0 ? offset_reg : addend,
5198                                   op_index == 0 ? addend : offset_reg);
5201            cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5202            find_reloads_address_part (XEXP (ad, op_index),
5203                                       &XEXP (ad, op_index), cls,
5204                                       GET_MODE (ad), opnum, type, ind_levels);
5205            find_reloads_address_1 (mode, as,
5206                                    XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5207                                    GET_CODE (XEXP (ad, op_index)),
5208                                    &XEXP (ad, 1 - op_index), opnum,
5215    /* See if address becomes valid when an eliminable register
5216       in a sum is replaced.  */
5219    if (GET_CODE (ad) == PLUS)
5220      tem = subst_indexed_address (ad);
5221    if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5223        /* Ok, we win that way.  Replace any additional eliminable
5226        subst_reg_equivs_changed = 0;
5227        tem = subst_reg_equivs (tem, insn);
5229        /* Make sure that didn't make the address invalid again.  */
5231        if (! subst_reg_equivs_changed
5232            || strict_memory_address_addr_space_p (mode, tem, as))
  /* If constants aren't valid addresses, reload the constant address
     into a register.  */
  if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
    {
      machine_mode address_mode = GET_MODE (ad);
      if (address_mode == VOIDmode)
        address_mode = targetm.addr_space.address_mode (as);

      /* If AD is an address in the constant pool, the MEM rtx may be shared.
         Unshare it so we can safely alter it.  */
      if (memrefloc && GET_CODE (ad) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (ad))
        {
          *memrefloc = copy_rtx (*memrefloc);
          loc = &XEXP (*memrefloc, 0);
          if (removed_and)
            loc = &XEXP (*loc, 0);
        }

      find_reloads_address_part (ad, loc,
                                 base_reg_class (mode, as, MEM, SCRATCH),
                                 address_mode, opnum, type, ind_levels);
      return ! removed_and;
    }

  return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
                                 opnum, type, ind_levels, insn);
}
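/* Illustrative sketch, not part of the original sources (symbol name
   invented): on a target where a bare symbolic address is not a valid
   memory address, a reference such as

	(mem:SI (symbol_ref:SI ("some_var")))

   reaches the CONSTANT_P case above; find_reloads_address_part then pushes
   a reload of the SYMBOL_REF into a register of the base-register class,
   so the insn ends up addressing (mem:SI (reg:SI base-reload)).  */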
/* Find all pseudo regs appearing in AD
   that are eliminable in favor of equivalent values
   and do not have hard regs; replace them by their equivalents.
   INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
   front of it for pseudos that we have to replace with stack slots.  */

static rtx
subst_reg_equivs (rtx ad, rtx_insn *insn)
{
  RTX_CODE code = GET_CODE (ad);
5294 int regno
= REGNO (ad
);
5296 if (reg_equiv_constant (regno
) != 0)
5298 subst_reg_equivs_changed
= 1;
5299 return reg_equiv_constant (regno
);
5301 if (reg_equiv_memory_loc (regno
) && num_not_at_initial_offset
)
5303 rtx mem
= make_memloc (ad
, regno
);
5304 if (! rtx_equal_p (mem
, reg_equiv_mem (regno
)))
5306 subst_reg_equivs_changed
= 1;
5307 /* We mark the USE with QImode so that we recognize it
5308 as one that can be safely deleted at the end of
5310 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode
, ad
), insn
),
5319 /* Quickly dispose of a common case. */
5320 if (XEXP (ad
, 0) == frame_pointer_rtx
5321 && CONST_INT_P (XEXP (ad
, 1)))
5329 fmt
= GET_RTX_FORMAT (code
);
5330 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5332 XEXP (ad
, i
) = subst_reg_equivs (XEXP (ad
, i
), insn
);
/* Compute the sum of X and Y, making canonicalizations assumed in an
   address, namely: sum constant integers, surround the sum of two
   constants with a CONST, put the constant as the second operand, and
   group the constant on the outermost sum.

   This routine assumes both inputs are already in canonical form.  */

static rtx
form_sum (machine_mode mode, rtx x, rtx y)
{
  rtx tem;
  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);

  if (CONST_INT_P (x))
    return plus_constant (mode, y, INTVAL (x));
  else if (CONST_INT_P (y))
    return plus_constant (mode, x, INTVAL (y));
  else if (CONSTANT_P (x))
    tem = x, x = y, y = tem;

  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
    return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));

  /* Note that if the operands of Y are specified in the opposite
     order in the recursive calls below, infinite recursion will occur.  */
  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
    return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));

  /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
     constant will have been placed second.  */
  if (CONSTANT_P (x) && CONSTANT_P (y))
    {
      if (GET_CODE (x) == CONST)
        x = XEXP (x, 0);
      if (GET_CODE (y) == CONST)
        y = XEXP (y, 0);

      return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
    }

  return gen_rtx_PLUS (mode, x, y);
}
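/* Worked examples of the canonicalizations above (illustrative only; the
   register number and symbol are invented):

	form_sum (SImode, (plus (reg 65) (const_int 8)), (const_int 4))
	  => (plus (reg 65) (const_int 12))

	form_sum (SImode, (symbol_ref ("x")), (const_int 4))
	  => (const (plus (symbol_ref ("x")) (const_int 4)))

   i.e. integer constants are folded, a constant always ends up as the
   second operand, and a fully constant sum is wrapped in a CONST.  */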
/* If ADDR is a sum containing a pseudo register that should be
   replaced with a constant (from reg_equiv_constant),
   return the result of doing so, and also apply the associative
   law so that the result is more likely to be a valid address.
   (But it is not guaranteed to be one.)

   Note that at most one register is replaced, even if more are
   replaceable.  Also, we try to put the result into a canonical form
   so it is more likely to be a valid address.

   In all other cases, return ADDR.  */

static rtx
subst_indexed_address (rtx addr)
{
  rtx op0 = 0, op1 = 0, op2 = 0;
  rtx tem;
  int regno;
  if (GET_CODE (addr) == PLUS)
    {
      /* Try to find a register to replace.  */
      op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
      if (REG_P (op0)
          && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
          && reg_renumber[regno] < 0
          && reg_equiv_constant (regno) != 0)
        op0 = reg_equiv_constant (regno);
      else if (REG_P (op1)
               && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
               && reg_renumber[regno] < 0
               && reg_equiv_constant (regno) != 0)
        op1 = reg_equiv_constant (regno);
      else if (GET_CODE (op0) == PLUS
               && (tem = subst_indexed_address (op0)) != op0)
        op0 = tem;
      else if (GET_CODE (op1) == PLUS
               && (tem = subst_indexed_address (op1)) != op1)
        op1 = tem;
      else
        return addr;

      /* Pick out up to three things to add.  */
      if (GET_CODE (op1) == PLUS)
        op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
      else if (GET_CODE (op0) == PLUS)
        op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

      /* Compute the sum.  */
      if (op2 != 0)
        op1 = form_sum (GET_MODE (addr), op1, op2);
      if (op1 != 0)
        op0 = form_sum (GET_MODE (addr), op0, op1);

      return op0;
    }

  return addr;
}
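/* Illustrative example (register numbers invented): if pseudo 101 did not
   get a hard register and reg_equiv_constant (101) is (const_int 16), then

	subst_indexed_address ((plus (plus (reg 100) (reg 101)) (const_int 4)))

   replaces reg 101 by its constant and regroups the sum with form_sum,
   yielding

	(plus (reg 100) (const_int 20))

   which is more likely to be a valid address.  */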
/* Update the REG_INC notes for an insn.  It updates all REG_INC
   notes for the instruction which refer to REGNO, making them refer
   to the reload number.

   INSN is the insn for which any REG_INC notes need updating.

   REGNO is the register number which has been reloaded.

   RELOADNUM is the reload number.  */

static void
update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
                       int reloadnum ATTRIBUTE_UNUSED)
{
  if (!AUTO_INC_DEC)
    return;

  for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (int) REGNO (XEXP (link, 0)) == regno)
      push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
}
/* Record the pseudo registers we must reload into hard registers in a
   subexpression of a would-be memory address, X referring to a value
   in mode MODE.  (This function is not called if the address we find
   is strictly valid.)

   CONTEXT = 1 means we are considering regs as index regs,
   = 0 means we are considering them as base regs.
   OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
   or an autoincrement.
   If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
   is the code of the index part of the address.  Otherwise, pass SCRATCH
   for this argument.

   OPNUM and TYPE specify the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads.

   We return nonzero if X, as a whole, is reloaded or replaced.  */

/* Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.
   Also, this is not fully machine-customizable; it works for machines
   such as VAXen and 68000's and 32000's, but other possible machines
   could have addressing modes that this does not handle right.
   If you add push_reload calls here, you need to make sure gen_reload
   handles those cases gracefully.  */
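/* Illustrative example, not from the original sources (register numbers
   invented): for an address

	(plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))

   the PLUS case below examines (reg 100) with CONTEXT == 0 (as a base
   register, with MULT as the INDEX_CODE) and examines the MULT operand
   with CONTEXT == 1 (as an index), so each pseudo is reloaded into a
   register class appropriate for its role in the address.  */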
static int
find_reloads_address_1 (machine_mode mode, addr_space_t as,
                        rtx x, int context,
                        enum rtx_code outer_code, enum rtx_code index_code,
                        rtx *loc, int opnum, enum reload_type type,
                        int ind_levels, rtx_insn *insn)
{
#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
  ((CONTEXT) == 0							\
   ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
   : REGNO_OK_FOR_INDEX_P (REGNO))

  enum reg_class context_reg_class;
  RTX_CODE code = GET_CODE (x);
  bool reloaded_inner_of_autoinc = false;

  if (context == 1)
    context_reg_class = INDEX_REG_CLASS;
  else
    context_reg_class = base_reg_class (mode, as, outer_code, index_code);
      rtx orig_op0 = XEXP (x, 0);
      rtx orig_op1 = XEXP (x, 1);
      RTX_CODE code0 = GET_CODE (orig_op0);
      RTX_CODE code1 = GET_CODE (orig_op1);
      rtx op0 = orig_op0;
      rtx op1 = orig_op1;
5525 if (GET_CODE (op0
) == SUBREG
)
5527 op0
= SUBREG_REG (op0
);
5528 code0
= GET_CODE (op0
);
5529 if (code0
== REG
&& REGNO (op0
) < FIRST_PSEUDO_REGISTER
)
5530 op0
= gen_rtx_REG (word_mode
,
5532 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0
)),
5533 GET_MODE (SUBREG_REG (orig_op0
)),
5534 SUBREG_BYTE (orig_op0
),
5535 GET_MODE (orig_op0
))));
5538 if (GET_CODE (op1
) == SUBREG
)
5540 op1
= SUBREG_REG (op1
);
5541 code1
= GET_CODE (op1
);
5542 if (code1
== REG
&& REGNO (op1
) < FIRST_PSEUDO_REGISTER
)
5543 /* ??? Why is this given op1's mode and above for
5544 ??? op0 SUBREGs we use word_mode? */
5545 op1
= gen_rtx_REG (GET_MODE (op1
),
5547 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1
)),
5548 GET_MODE (SUBREG_REG (orig_op1
)),
5549 SUBREG_BYTE (orig_op1
),
5550 GET_MODE (orig_op1
))));
5552 /* Plus in the index register may be created only as a result of
5553 register rematerialization for expression like &localvar*4. Reload it.
5554 It may be possible to combine the displacement on the outer level,
5555 but it is probably not worthwhile to do so. */
5558 find_reloads_address (GET_MODE (x
), loc
, XEXP (x
, 0), &XEXP (x
, 0),
5559 opnum
, ADDR_TYPE (type
), ind_levels
, insn
);
5560 push_reload (*loc
, NULL_RTX
, loc
, (rtx
*) 0,
5562 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5566 if (code0
== MULT
|| code0
== SIGN_EXTEND
|| code0
== TRUNCATE
5567 || code0
== ZERO_EXTEND
|| code1
== MEM
)
5569 find_reloads_address_1 (mode
, as
, orig_op0
, 1, PLUS
, SCRATCH
,
5570 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5572 find_reloads_address_1 (mode
, as
, orig_op1
, 0, PLUS
, code0
,
5573 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5577 else if (code1
== MULT
|| code1
== SIGN_EXTEND
|| code1
== TRUNCATE
5578 || code1
== ZERO_EXTEND
|| code0
== MEM
)
5580 find_reloads_address_1 (mode
, as
, orig_op0
, 0, PLUS
, code1
,
5581 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5583 find_reloads_address_1 (mode
, as
, orig_op1
, 1, PLUS
, SCRATCH
,
5584 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5588 else if (code0
== CONST_INT
|| code0
== CONST
5589 || code0
== SYMBOL_REF
|| code0
== LABEL_REF
)
5590 find_reloads_address_1 (mode
, as
, orig_op1
, 0, PLUS
, code0
,
5591 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5594 else if (code1
== CONST_INT
|| code1
== CONST
5595 || code1
== SYMBOL_REF
|| code1
== LABEL_REF
)
5596 find_reloads_address_1 (mode
, as
, orig_op0
, 0, PLUS
, code1
,
5597 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5600 else if (code0
== REG
&& code1
== REG
)
5602 if (REGNO_OK_FOR_INDEX_P (REGNO (op1
))
5603 && regno_ok_for_base_p (REGNO (op0
), mode
, as
, PLUS
, REG
))
5605 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0
))
5606 && regno_ok_for_base_p (REGNO (op1
), mode
, as
, PLUS
, REG
))
5608 else if (regno_ok_for_base_p (REGNO (op0
), mode
, as
, PLUS
, REG
))
5609 find_reloads_address_1 (mode
, as
, orig_op1
, 1, PLUS
, SCRATCH
,
5610 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5612 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1
)))
5613 find_reloads_address_1 (mode
, as
, orig_op0
, 0, PLUS
, REG
,
5614 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5616 else if (regno_ok_for_base_p (REGNO (op1
), mode
, as
, PLUS
, REG
))
5617 find_reloads_address_1 (mode
, as
, orig_op0
, 1, PLUS
, SCRATCH
,
5618 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5620 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0
)))
5621 find_reloads_address_1 (mode
, as
, orig_op1
, 0, PLUS
, REG
,
5622 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5626 find_reloads_address_1 (mode
, as
, orig_op0
, 0, PLUS
, REG
,
5627 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5629 find_reloads_address_1 (mode
, as
, orig_op1
, 1, PLUS
, SCRATCH
,
5630 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5635 else if (code0
== REG
)
5637 find_reloads_address_1 (mode
, as
, orig_op0
, 1, PLUS
, SCRATCH
,
5638 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5640 find_reloads_address_1 (mode
, as
, orig_op1
, 0, PLUS
, REG
,
5641 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5645 else if (code1
== REG
)
5647 find_reloads_address_1 (mode
, as
, orig_op1
, 1, PLUS
, SCRATCH
,
5648 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5650 find_reloads_address_1 (mode
, as
, orig_op0
, 0, PLUS
, REG
,
5651 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5661 rtx op0
= XEXP (x
, 0);
5662 rtx op1
= XEXP (x
, 1);
5663 enum rtx_code index_code
;
5667 if (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
5670 /* Currently, we only support {PRE,POST}_MODIFY constructs
5671 where a base register is {inc,dec}remented by the contents
5672 of another register or by a constant value. Thus, these
5673 operands must match. */
5674 gcc_assert (op0
== XEXP (op1
, 0));
5676 /* Require index register (or constant). Let's just handle the
5677 register case in the meantime... If the target allows
5678 auto-modify by a constant then we could try replacing a pseudo
5679 register with its equivalent constant where applicable.
5681 We also handle the case where the register was eliminated
5682 resulting in a PLUS subexpression.
5684 If we later decide to reload the whole PRE_MODIFY or
5685 POST_MODIFY, inc_for_reload might clobber the reload register
5686 before reading the index. The index register might therefore
5687 need to live longer than a TYPE reload normally would, so be
5688 conservative and class it as RELOAD_OTHER. */
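      /* For illustration only (registers invented): typical constructs
	 handled here are

	     (pre_modify:SI (reg:SI 13 sp)
			    (plus:SI (reg:SI 13 sp) (const_int -16)))

	 where the base register is adjusted by a constant, and

	     (post_modify:SI (reg:SI 100)
			     (plus:SI (reg:SI 100) (reg:SI 101)))

	 where it is adjusted by another register; in the latter case the
	 index register may itself get the RELOAD_OTHER reload described
	 above.  */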
5689 if ((REG_P (XEXP (op1
, 1))
5690 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1
, 1))))
5691 || GET_CODE (XEXP (op1
, 1)) == PLUS
)
5692 find_reloads_address_1 (mode
, as
, XEXP (op1
, 1), 1, code
, SCRATCH
,
5693 &XEXP (op1
, 1), opnum
, RELOAD_OTHER
,
5696 gcc_assert (REG_P (XEXP (op1
, 0)));
5698 regno
= REGNO (XEXP (op1
, 0));
5699 index_code
= GET_CODE (XEXP (op1
, 1));
5701 /* A register that is incremented cannot be constant! */
5702 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
5703 || reg_equiv_constant (regno
) == 0);
5705 /* Handle a register that is equivalent to a memory location
5706 which cannot be addressed directly. */
5707 if (reg_equiv_memory_loc (regno
) != 0
5708 && (reg_equiv_address (regno
) != 0
5709 || num_not_at_initial_offset
))
5711 rtx tem
= make_memloc (XEXP (x
, 0), regno
);
5713 if (reg_equiv_address (regno
)
5714 || ! rtx_equal_p (tem
, reg_equiv_mem (regno
)))
5718 /* First reload the memory location's address.
5719 We can't use ADDR_TYPE (type) here, because we need to
5720 write back the value after reading it, hence we actually
5721 need two registers. */
5722 find_reloads_address (GET_MODE (tem
), &tem
, XEXP (tem
, 0),
5723 &XEXP (tem
, 0), opnum
,
5727 if (!rtx_equal_p (tem
, orig
))
5728 push_reg_equiv_alt_mem (regno
, tem
);
5730 /* Then reload the memory location into a base
5732 reloadnum
= push_reload (tem
, tem
, &XEXP (x
, 0),
5734 base_reg_class (mode
, as
,
5736 GET_MODE (x
), GET_MODE (x
), 0,
5737 0, opnum
, RELOAD_OTHER
);
5739 update_auto_inc_notes (this_insn
, regno
, reloadnum
);
5744 if (reg_renumber
[regno
] >= 0)
5745 regno
= reg_renumber
[regno
];
5747 /* We require a base register here... */
5748 if (!regno_ok_for_base_p (regno
, GET_MODE (x
), as
, code
, index_code
))
5750 reloadnum
= push_reload (XEXP (op1
, 0), XEXP (x
, 0),
5751 &XEXP (op1
, 0), &XEXP (x
, 0),
5752 base_reg_class (mode
, as
,
5754 GET_MODE (x
), GET_MODE (x
), 0, 0,
5755 opnum
, RELOAD_OTHER
);
5757 update_auto_inc_notes (this_insn
, regno
, reloadnum
);
5767 if (REG_P (XEXP (x
, 0)))
5769 int regno
= REGNO (XEXP (x
, 0));
5773 /* A register that is incremented cannot be constant! */
5774 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
5775 || reg_equiv_constant (regno
) == 0);
5777 /* Handle a register that is equivalent to a memory location
5778 which cannot be addressed directly. */
5779 if (reg_equiv_memory_loc (regno
) != 0
5780 && (reg_equiv_address (regno
) != 0 || num_not_at_initial_offset
))
5782 rtx tem
= make_memloc (XEXP (x
, 0), regno
);
5783 if (reg_equiv_address (regno
)
5784 || ! rtx_equal_p (tem
, reg_equiv_mem (regno
)))
5788 /* First reload the memory location's address.
5789 We can't use ADDR_TYPE (type) here, because we need to
5790 write back the value after reading it, hence we actually
5791 need two registers. */
5792 find_reloads_address (GET_MODE (tem
), &tem
, XEXP (tem
, 0),
5793 &XEXP (tem
, 0), opnum
, type
,
5795 reloaded_inner_of_autoinc
= true;
5796 if (!rtx_equal_p (tem
, orig
))
5797 push_reg_equiv_alt_mem (regno
, tem
);
5798 /* Put this inside a new increment-expression. */
5799 x
= gen_rtx_fmt_e (GET_CODE (x
), GET_MODE (x
), tem
);
5800 /* Proceed to reload that, as if it contained a register. */
5804 /* If we have a hard register that is ok in this incdec context,
5805 don't make a reload. If the register isn't nice enough for
5806 autoincdec, we can reload it. But, if an autoincrement of a
5807 register that we here verified as playing nice, still outside
5808 isn't "valid", it must be that no autoincrement is "valid".
5809 If that is true and something made an autoincrement anyway,
5810 this must be a special context where one is allowed.
5811 (For example, a "push" instruction.)
5812 We can't improve this address, so leave it alone. */
5814 /* Otherwise, reload the autoincrement into a suitable hard reg
5815 and record how much to increment by. */
5817 if (reg_renumber
[regno
] >= 0)
5818 regno
= reg_renumber
[regno
];
5819 if (regno
>= FIRST_PSEUDO_REGISTER
5820 || !REG_OK_FOR_CONTEXT (context
, regno
, mode
, as
, code
,
5825 /* If we can output the register afterwards, do so, this
5826 saves the extra update.
5827 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5828 CALL_INSN - and it does not set CC0.
5829 But don't do this if we cannot directly address the
5830 memory location, since this will make it harder to
5831 reuse address reloads, and increases register pressure.
5832 Also don't do this if we can probably update x directly. */
5833 rtx equiv
= (MEM_P (XEXP (x
, 0))
5835 : reg_equiv_mem (regno
));
5836 enum insn_code icode
= optab_handler (add_optab
, GET_MODE (x
));
5837 if (insn
&& NONJUMP_INSN_P (insn
)
5839 && ! sets_cc0_p (PATTERN (insn
))
5841 && (regno
< FIRST_PSEUDO_REGISTER
5843 && memory_operand (equiv
, GET_MODE (equiv
))
5844 && ! (icode
!= CODE_FOR_nothing
5845 && insn_operand_matches (icode
, 0, equiv
)
5846 && insn_operand_matches (icode
, 1, equiv
))))
5847 /* Using RELOAD_OTHER means we emit this and the reload we
5848 made earlier in the wrong order. */
5849 && !reloaded_inner_of_autoinc
)
5851 /* We use the original pseudo for loc, so that
5852 emit_reload_insns() knows which pseudo this
5853 reload refers to and updates the pseudo rtx, not
5854 its equivalent memory location, as well as the
5855 corresponding entry in reg_last_reload_reg. */
5856 loc
= &XEXP (x_orig
, 0);
5859 = push_reload (x
, x
, loc
, loc
,
5861 GET_MODE (x
), GET_MODE (x
), 0, 0,
5862 opnum
, RELOAD_OTHER
);
5867 = push_reload (x
, x
, loc
, (rtx
*) 0,
5869 GET_MODE (x
), GET_MODE (x
), 0, 0,
5872 = find_inc_amount (PATTERN (this_insn
), XEXP (x_orig
, 0));
5877 update_auto_inc_notes (this_insn
, REGNO (XEXP (x_orig
, 0)),
5887 /* Look for parts to reload in the inner expression and reload them
5888 too, in addition to this operation. Reloading all inner parts in
5889 addition to this one shouldn't be necessary, but at this point,
5890 we don't know if we can possibly omit any part that *can* be
5891 reloaded. Targets that are better off reloading just either part
5892 (or perhaps even a different part of an outer expression), should
5893 define LEGITIMIZE_RELOAD_ADDRESS. */
5894 find_reloads_address_1 (GET_MODE (XEXP (x
, 0)), as
, XEXP (x
, 0),
5895 context
, code
, SCRATCH
, &XEXP (x
, 0), opnum
,
5896 type
, ind_levels
, insn
);
5897 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5899 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5903 /* This is probably the result of a substitution, by eliminate_regs, of
5904 an equivalent address for a pseudo that was not allocated to a hard
5905 register. Verify that the specified address is valid and reload it
5908 Since we know we are going to reload this item, don't decrement for
5909 the indirection level.
5911 Note that this is actually conservative: it would be slightly more
5912 efficient to use the value of SPILL_INDIRECT_LEVELS from
5915 find_reloads_address (GET_MODE (x
), loc
, XEXP (x
, 0), &XEXP (x
, 0),
5916 opnum
, ADDR_TYPE (type
), ind_levels
, insn
);
5917 push_reload (*loc
, NULL_RTX
, loc
, (rtx
*) 0,
5919 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5924 int regno
= REGNO (x
);
5926 if (reg_equiv_constant (regno
) != 0)
5928 find_reloads_address_part (reg_equiv_constant (regno
), loc
,
5930 GET_MODE (x
), opnum
, type
, ind_levels
);
5934 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5935 that feeds this insn. */
5936 if (reg_equiv_mem (regno
) != 0)
5938 push_reload (reg_equiv_mem (regno
), NULL_RTX
, loc
, (rtx
*) 0,
5940 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5945 if (reg_equiv_memory_loc (regno
)
5946 && (reg_equiv_address (regno
) != 0 || num_not_at_initial_offset
))
5948 rtx tem
= make_memloc (x
, regno
);
5949 if (reg_equiv_address (regno
) != 0
5950 || ! rtx_equal_p (tem
, reg_equiv_mem (regno
)))
5953 find_reloads_address (GET_MODE (x
), &x
, XEXP (x
, 0),
5954 &XEXP (x
, 0), opnum
, ADDR_TYPE (type
),
5956 if (!rtx_equal_p (x
, tem
))
5957 push_reg_equiv_alt_mem (regno
, x
);
5961 if (reg_renumber
[regno
] >= 0)
5962 regno
= reg_renumber
[regno
];
5964 if (regno
>= FIRST_PSEUDO_REGISTER
5965 || !REG_OK_FOR_CONTEXT (context
, regno
, mode
, as
, outer_code
,
5968 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5970 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5974 /* If a register appearing in an address is the subject of a CLOBBER
5975 in this insn, reload it into some other register to be safe.
5976 The CLOBBER is supposed to make the register unavailable
5977 from before this insn to after it. */
5978 if (regno_clobbered_p (regno
, this_insn
, GET_MODE (x
), 0))
5980 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5982 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5989 if (REG_P (SUBREG_REG (x
)))
5991 /* If this is a SUBREG of a hard register and the resulting register
5992 is of the wrong class, reload the whole SUBREG. This avoids
5993 needless copies if SUBREG_REG is multi-word. */
5994 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
5996 int regno ATTRIBUTE_UNUSED
= subreg_regno (x
);
5998 if (!REG_OK_FOR_CONTEXT (context
, regno
, mode
, as
, outer_code
,
6001 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
6003 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
6007 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6008 is larger than the class size, then reload the whole SUBREG. */
6011 enum reg_class rclass
= context_reg_class
;
6012 if (ira_reg_class_max_nregs
[rclass
][GET_MODE (SUBREG_REG (x
))]
6013 > reg_class_size
[(int) rclass
])
6015 /* If the inner register will be replaced by a memory
6016 reference, we can do this only if we can replace the
6017 whole subreg by a (narrower) memory reference. If
6018 this is not possible, fall through and reload just
6019 the inner register (including address reloads). */
6020 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x
))) != 0)
6022 rtx tem
= find_reloads_subreg_address (x
, opnum
,
6028 push_reload (tem
, NULL_RTX
, loc
, (rtx
*) 0, rclass
,
6029 GET_MODE (tem
), VOIDmode
, 0, 0,
6036 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0, rclass
,
6037 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
6050 const char *fmt
= GET_RTX_FORMAT (code
);
6053 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6056 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6058 find_reloads_address_1 (mode
, as
, XEXP (x
, i
), context
,
6059 code
, SCRATCH
, &XEXP (x
, i
),
6060 opnum
, type
, ind_levels
, insn
);
6064 #undef REG_OK_FOR_CONTEXT
/* X, which is found at *LOC, is a part of an address that needs to be
   reloaded into a register of class RCLASS.  If X is a constant, or if
   X is a PLUS that contains a constant, check that the constant is a
   legitimate operand and that we are supposed to be able to load
   it into the register.

   If not, force the constant into memory and reload the MEM instead.

   MODE is the mode to use, in case X is an integer constant.

   OPNUM and TYPE describe the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.  */

static void
find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
                           machine_mode mode, int opnum,
                           enum reload_type type, int ind_levels)
{
  if (CONSTANT_P (x)
      && (!targetm.legitimate_constant_p (mode, x)
          || targetm.preferred_reload_class (x, rclass) == NO_REGS))
    {
      x = force_const_mem (mode, x);
      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
                            opnum, type, ind_levels, 0);
    }

  else if (GET_CODE (x) == PLUS
           && CONSTANT_P (XEXP (x, 1))
           && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
               || targetm.preferred_reload_class (XEXP (x, 1), rclass)
                  == NO_REGS))
    {
      rtx tem;

      tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
      x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
      find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
                            opnum, type, ind_levels, 0);
    }

  push_reload (x, NULL_RTX, loc, (rtx *) 0, rclass,
               mode, VOIDmode, 0, 0, opnum, type);
}
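/* Illustrative example, not from the original sources (the constant is
   invented): if (const_int 305419896) is not a legitimate immediate for
   RCLASS on the target, the code above forces it into the constant pool
   and reloads the resulting

	(mem:SI (symbol_ref:SI ("*.LC0")))

   instead, after reloading the pool reference's own address if needed.  */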
/* X, a subreg of a pseudo, is a part of an address that needs to be
   reloaded, and the pseudo is equivalent to a memory location.

   Attempt to replace the whole subreg by a (possibly narrower or wider)
   memory reference.  If this is possible, return this new memory
   reference, and push all required address reloads.  Otherwise,
   return NULL.

   OPNUM and TYPE identify the purpose of the reload.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine where to put USEs for pseudos that we have to replace with
   stack slots.  */

static rtx
find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
                             int ind_levels, rtx_insn *insn,
                             int *address_reloaded)
{
  machine_mode outer_mode = GET_MODE (x);
  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  int regno = REGNO (SUBREG_REG (x));
  int reloaded = 0;
  rtx tem, orig;
  int offset;

  gcc_assert (reg_equiv_memory_loc (regno) != 0);
6146 /* We cannot replace the subreg with a modified memory reference if:
6148 - we have a paradoxical subreg that implicitly acts as a zero or
6149 sign extension operation due to LOAD_EXTEND_OP;
6151 - we have a subreg that is implicitly supposed to act on the full
6152 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6154 - the address of the equivalent memory location is mode-dependent; or
6156 - we have a paradoxical subreg and the resulting memory is not
6157 sufficiently aligned to allow access in the wider mode.
6159 In addition, we choose not to perform the replacement for *any*
6160 paradoxical subreg, even if it were possible in principle. This
6161 is to avoid generating wider memory references than necessary.
6163 This corresponds to how previous versions of reload used to handle
6164 paradoxical subregs where no address reload was required. */
6166 if (paradoxical_subreg_p (x
))
6169 if (WORD_REGISTER_OPERATIONS
6170 && GET_MODE_SIZE (outer_mode
) < GET_MODE_SIZE (inner_mode
)
6171 && ((GET_MODE_SIZE (outer_mode
) - 1) / UNITS_PER_WORD
6172 == (GET_MODE_SIZE (inner_mode
) - 1) / UNITS_PER_WORD
))
6175 /* Since we don't attempt to handle paradoxical subregs, we can just
6176 call into simplify_subreg, which will handle all remaining checks
6178 orig
= make_memloc (SUBREG_REG (x
), regno
);
6179 offset
= SUBREG_BYTE (x
);
6180 tem
= simplify_subreg (outer_mode
, orig
, inner_mode
, offset
);
6181 if (!tem
|| !MEM_P (tem
))
6184 /* Now push all required address reloads, if any. */
6185 reloaded
= find_reloads_address (GET_MODE (tem
), &tem
,
6186 XEXP (tem
, 0), &XEXP (tem
, 0),
6187 opnum
, type
, ind_levels
, insn
);
6188 /* ??? Do we need to handle nonzero offsets somehow? */
6189 if (!offset
&& !rtx_equal_p (tem
, orig
))
6190 push_reg_equiv_alt_mem (regno
, tem
);
6192 /* For some processors an address may be valid in the original mode but
6193 not in a smaller mode. For example, ARM accepts a scaled index register
6194 in SImode but not in HImode. Note that this is only a problem if the
6195 address in reg_equiv_mem is already invalid in the new mode; other
6196 cases would be fixed by find_reloads_address as usual.
6198 ??? We attempt to handle such cases here by doing an additional reload
6199 of the full address after the usual processing by find_reloads_address.
6200 Note that this may not work in the general case, but it seems to cover
6201 the cases where this situation currently occurs. A more general fix
6202 might be to reload the *value* instead of the address, but this would
6203 not be expected by the callers of this routine as-is.
     If find_reloads_address already completely replaced the address, there
     is nothing further to do.  */
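  /* Concrete illustration (ARM-like, registers invented): an SImode
     equivalence such as

	 (mem:SI (plus:SI (reg:SI 0) (mult:SI (reg:SI 1) (const_int 4))))

     is a valid SImode address but not a valid HImode one, so accessing a
     (subreg:HI ...) of the pseudo needs the extra reload of the full
     address that is pushed below.  */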
6208 && reg_equiv_mem (regno
) != 0
6209 && !strict_memory_address_addr_space_p
6210 (GET_MODE (x
), XEXP (reg_equiv_mem (regno
), 0),
6211 MEM_ADDR_SPACE (reg_equiv_mem (regno
))))
6213 push_reload (XEXP (tem
, 0), NULL_RTX
, &XEXP (tem
, 0), (rtx
*) 0,
6214 base_reg_class (GET_MODE (tem
), MEM_ADDR_SPACE (tem
),
6216 GET_MODE (XEXP (tem
, 0)), VOIDmode
, 0, 0, opnum
, type
);
6220 /* If this is not a toplevel operand, find_reloads doesn't see this
6221 substitution. We have to emit a USE of the pseudo so that
6222 delete_output_reload can see it. */
6223 if (replace_reloads
&& recog_data
.operand
[opnum
] != x
)
6224 /* We mark the USE with QImode so that we recognize it as one that
6225 can be safely deleted at the end of reload. */
6226 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode
, SUBREG_REG (x
)), insn
),
6229 if (address_reloaded
)
6230 *address_reloaded
= reloaded
;
/* Substitute into the current INSN the registers into which we have reloaded
   the things that need reloading.  The array `replacements'
   contains the locations of all pointers that must be changed
   and says what to replace them with.  */

void
subst_reloads (rtx_insn *insn)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;

      if (reloadreg)
	{
#ifdef DEBUG_RELOAD
6254 /* This checking takes a very long time on some platforms
6255 causing the gcc.c-torture/compile/limits-fnargs.c test
6256 to time out during testing. See PR 31850.
6258 Internal consistency test. Check that we don't modify
6259 anything in the equivalence arrays. Whenever something from
6260 those arrays needs to be reloaded, it must be unshared before
6261 being substituted into; the equivalence must not be modified.
6262 Otherwise, if the equivalence is used after that, it will
6263 have been modified, and the thing substituted (probably a
6264 register) is likely overwritten and not a usable equivalence. */
6267 for (check_regno
= 0; check_regno
< max_regno
; check_regno
++)
6269 #define CHECK_MODF(ARRAY) \
6270 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6271 || !loc_mentioned_in_p (r->where, \
6272 (*reg_equivs)[check_regno].ARRAY))
6274 CHECK_MODF (constant
);
6275 CHECK_MODF (memory_loc
);
6276 CHECK_MODF (address
);
6280 #endif /* DEBUG_RELOAD */
6282 /* If we're replacing a LABEL_REF with a register, there must
6283 already be an indication (to e.g. flow) which label this
6284 register refers to. */
6285 gcc_assert (GET_CODE (*r
->where
) != LABEL_REF
6287 || find_reg_note (insn
,
6289 XEXP (*r
->where
, 0))
6290 || label_is_jump_target_p (XEXP (*r
->where
, 0), insn
));
6292 /* Encapsulate RELOADREG so its machine mode matches what
6293 used to be there. Note that gen_lowpart_common will
6294 do the wrong thing if RELOADREG is multi-word. RELOADREG
6295 will always be a REG here. */
	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  *r->where = reloadreg;
	}
      /* If reload got no reg and isn't optional, something's wrong.  */
      else
	gcc_assert (rld[r->what].optional);
    }
}
6307 /* Make a copy of any replacements being done into X and move those
6308 copies to locations in Y, a copy of X. */
6311 copy_replacements (rtx x
, rtx y
)
6313 copy_replacements_1 (&x
, &y
, n_replacements
);
6317 copy_replacements_1 (rtx
*px
, rtx
*py
, int orig_replacements
)
6321 struct replacement
*r
;
6325 for (j
= 0; j
< orig_replacements
; j
++)
6326 if (replacements
[j
].where
== px
)
6328 r
= &replacements
[n_replacements
++];
6330 r
->what
= replacements
[j
].what
;
6331 r
->mode
= replacements
[j
].mode
;
6336 code
= GET_CODE (x
);
6337 fmt
= GET_RTX_FORMAT (code
);
6339 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6342 copy_replacements_1 (&XEXP (x
, i
), &XEXP (y
, i
), orig_replacements
);
6343 else if (fmt
[i
] == 'E')
6344 for (j
= XVECLEN (x
, i
); --j
>= 0; )
6345 copy_replacements_1 (&XVECEXP (x
, i
, j
), &XVECEXP (y
, i
, j
),
/* Change any replacements being done to *X to be done to *Y.  */

void
move_replacements (rtx *x, rtx *y)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    if (replacements[i].where == x)
      replacements[i].where = y;
}
/* If LOC was scheduled to be replaced by something, return the replacement.
   Otherwise, return *LOC.  */

rtx
find_replacement (rtx *loc)
{
  struct replacement *r;

  for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
    {
      rtx reloadreg = rld[r->what].reg_rtx;

      if (reloadreg && r->where == loc)
	{
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  return reloadreg;
	}
      else if (reloadreg && GET_CODE (*loc) == SUBREG
	       && r->where == &SUBREG_REG (*loc))
	{
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
				      GET_MODE (SUBREG_REG (*loc)),
				      SUBREG_BYTE (*loc));
	}
    }

  /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
     what's inside and make a new rtl if so.  */
  if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
      || GET_CODE (*loc) == MULT)
    {
      rtx x = find_replacement (&XEXP (*loc, 0));
      rtx y = find_replacement (&XEXP (*loc, 1));

      if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
    }

  return *loc;
}
6408 /* Return nonzero if register in range [REGNO, ENDREGNO)
6409 appears either explicitly or implicitly in X
6410 other than being stored into (except for earlyclobber operands).
6412 References contained within the substructure at LOC do not count.
6413 LOC may be zero, meaning don't ignore anything.
6415 This is similar to refers_to_regno_p in rtlanal.c except that we
6416 look at equivalences for pseudos that didn't get hard registers. */
6419 refers_to_regno_for_reload_p (unsigned int regno
, unsigned int endregno
,
6431 code
= GET_CODE (x
);
6438 /* If this is a pseudo, a hard register must not have been allocated.
6439 X must therefore either be a constant or be in memory. */
6440 if (r
>= FIRST_PSEUDO_REGISTER
)
6442 if (reg_equiv_memory_loc (r
))
6443 return refers_to_regno_for_reload_p (regno
, endregno
,
6444 reg_equiv_memory_loc (r
),
6447 gcc_assert (reg_equiv_constant (r
) || reg_equiv_invariant (r
));
6451 return (endregno
> r
6452 && regno
< r
+ (r
< FIRST_PSEUDO_REGISTER
6453 ? hard_regno_nregs
[r
][GET_MODE (x
)]
6457 /* If this is a SUBREG of a hard reg, we can see exactly which
6458 registers are being modified. Otherwise, handle normally. */
6459 if (REG_P (SUBREG_REG (x
))
6460 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
6462 unsigned int inner_regno
= subreg_regno (x
);
6463 unsigned int inner_endregno
6464 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
6465 ? subreg_nregs (x
) : 1);
6467 return endregno
> inner_regno
&& regno
< inner_endregno
;
6473 if (&SET_DEST (x
) != loc
6474 /* Note setting a SUBREG counts as referring to the REG it is in for
6475 a pseudo but not for hard registers since we can
6476 treat each word individually. */
6477 && ((GET_CODE (SET_DEST (x
)) == SUBREG
6478 && loc
!= &SUBREG_REG (SET_DEST (x
))
6479 && REG_P (SUBREG_REG (SET_DEST (x
)))
6480 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
6481 && refers_to_regno_for_reload_p (regno
, endregno
,
6482 SUBREG_REG (SET_DEST (x
)),
6484 /* If the output is an earlyclobber operand, this is
6486 || ((!REG_P (SET_DEST (x
))
6487 || earlyclobber_operand_p (SET_DEST (x
)))
6488 && refers_to_regno_for_reload_p (regno
, endregno
,
6489 SET_DEST (x
), loc
))))
6492 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
6501 /* X does not match, so try its subexpressions. */
6503 fmt
= GET_RTX_FORMAT (code
);
6504 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6506 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
6514 if (refers_to_regno_for_reload_p (regno
, endregno
,
6518 else if (fmt
[i
] == 'E')
6521 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
6522 if (loc
!= &XVECEXP (x
, i
, j
)
6523 && refers_to_regno_for_reload_p (regno
, endregno
,
6524 XVECEXP (x
, i
, j
), loc
))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).

   This function is similar to reg_overlap_mentioned_p in rtlanal.c except
   that we look at equivalences for pseudos that didn't get hard registers.  */
6541 reg_overlap_mentioned_for_reload_p (rtx x
, rtx in
)
6543 int regno
, endregno
;
6545 /* Overly conservative. */
6546 if (GET_CODE (x
) == STRICT_LOW_PART
6547 || GET_RTX_CLASS (GET_CODE (x
)) == RTX_AUTOINC
)
6550 /* If either argument is a constant, then modifying X can not affect IN. */
6551 if (CONSTANT_P (x
) || CONSTANT_P (in
))
6553 else if (GET_CODE (x
) == SUBREG
&& MEM_P (SUBREG_REG (x
)))
6554 return refers_to_mem_for_reload_p (in
);
6555 else if (GET_CODE (x
) == SUBREG
)
6557 regno
= REGNO (SUBREG_REG (x
));
6558 if (regno
< FIRST_PSEUDO_REGISTER
)
6559 regno
+= subreg_regno_offset (REGNO (SUBREG_REG (x
)),
6560 GET_MODE (SUBREG_REG (x
)),
6563 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
6564 ? subreg_nregs (x
) : 1);
6566 return refers_to_regno_for_reload_p (regno
, endregno
, in
, (rtx
*) 0);
6572 /* If this is a pseudo, it must not have been assigned a hard register.
6573 Therefore, it must either be in memory or be a constant. */
6575 if (regno
>= FIRST_PSEUDO_REGISTER
)
6577 if (reg_equiv_memory_loc (regno
))
6578 return refers_to_mem_for_reload_p (in
);
6579 gcc_assert (reg_equiv_constant (regno
));
6583 endregno
= END_REGNO (x
);
6585 return refers_to_regno_for_reload_p (regno
, endregno
, in
, (rtx
*) 0);
6588 return refers_to_mem_for_reload_p (in
);
6589 else if (GET_CODE (x
) == SCRATCH
|| GET_CODE (x
) == PC
6590 || GET_CODE (x
) == CC0
)
6591 return reg_mentioned_p (x
, in
);
6594 gcc_assert (GET_CODE (x
) == PLUS
);
6596 /* We actually want to know if X is mentioned somewhere inside IN.
6597 We must not say that (plus (sp) (const_int 124)) is in
6598 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6599 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6600 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6605 else if (GET_CODE (in
) == PLUS
)
6606 return (rtx_equal_p (x
, in
)
6607 || reg_overlap_mentioned_for_reload_p (x
, XEXP (in
, 0))
6608 || reg_overlap_mentioned_for_reload_p (x
, XEXP (in
, 1)));
6609 else return (reg_overlap_mentioned_for_reload_p (XEXP (x
, 0), in
)
6610 || reg_overlap_mentioned_for_reload_p (XEXP (x
, 1), in
));
6616 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6620 refers_to_mem_for_reload_p (rtx x
)
6629 return (REGNO (x
) >= FIRST_PSEUDO_REGISTER
6630 && reg_equiv_memory_loc (REGNO (x
)));
6632 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
6633 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
6635 && (MEM_P (XEXP (x
, i
))
6636 || refers_to_mem_for_reload_p (XEXP (x
, i
))))
6642 /* Check the insns before INSN to see if there is a suitable register
6643 containing the same value as GOAL.
6644 If OTHER is -1, look for a register in class RCLASS.
6645 Otherwise, just see if register number OTHER shares GOAL's value.
6647 Return an rtx for the register found, or zero if none is found.
6649 If RELOAD_REG_P is (short *)1,
6650 we reject any hard reg that appears in reload_reg_rtx
6651 because such a hard reg is also needed coming into this insn.
6653 If RELOAD_REG_P is any other nonzero value,
6654 it is a vector indexed by hard reg number
6655 and we reject any hard reg whose element in the vector is nonnegative
6656 as well as any that appears in reload_reg_rtx.
6658 If GOAL is zero, then GOALREG is a register number; we look
6659 for an equivalent for that register.
6661 MODE is the machine mode of the value we want an equivalence for.
6662 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6664 This function is used by jump.c as well as in the reload pass.
6666 If GOAL is the sum of the stack pointer and a constant, we treat it
6667 as if it were a constant except that sp is required to be unchanging. */
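/* Illustrative example, not from the original sources (register numbers
   invented): if the current insn needs the value of (reg:SI 117) in class
   GENERAL_REGS and an earlier insn did

	(set (reg:SI 3) (reg:SI 117))

   with neither register modified in between, find_equiv_reg returns
   (reg:SI 3), and the reload can reuse hard register 3 instead of
   emitting a new copy.  */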
6670 find_equiv_reg (rtx goal
, rtx_insn
*insn
, enum reg_class rclass
, int other
,
6671 short *reload_reg_p
, int goalreg
, machine_mode mode
)
6674 rtx goaltry
, valtry
, value
;
6681 int goal_mem_addr_varies
= 0;
6682 int need_stable_sp
= 0;
6689 else if (REG_P (goal
))
6690 regno
= REGNO (goal
);
6691 else if (MEM_P (goal
))
6693 enum rtx_code code
= GET_CODE (XEXP (goal
, 0));
6694 if (MEM_VOLATILE_P (goal
))
6696 if (flag_float_store
&& SCALAR_FLOAT_MODE_P (GET_MODE (goal
)))
6698 /* An address with side effects must be reexecuted. */
6713 else if (CONSTANT_P (goal
))
6715 else if (GET_CODE (goal
) == PLUS
6716 && XEXP (goal
, 0) == stack_pointer_rtx
6717 && CONSTANT_P (XEXP (goal
, 1)))
6718 goal_const
= need_stable_sp
= 1;
6719 else if (GET_CODE (goal
) == PLUS
6720 && XEXP (goal
, 0) == frame_pointer_rtx
6721 && CONSTANT_P (XEXP (goal
, 1)))
6727 /* Scan insns back from INSN, looking for one that copies
6728 a value into or out of GOAL.
6729 Stop and give up if we reach a label. */
6734 if (p
&& DEBUG_INSN_P (p
))
6737 if (p
== 0 || LABEL_P (p
)
6738 || num
> PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS
))
6741 /* Don't reuse register contents from before a setjmp-type
6742 function call; on the second return (from the longjmp) it
6743 might have been clobbered by a later reuse. It doesn't
6744 seem worthwhile to actually go and see if it is actually
6745 reused even if that information would be readily available;
6746 just don't reuse it across the setjmp call. */
6747 if (CALL_P (p
) && find_reg_note (p
, REG_SETJMP
, NULL_RTX
))
6750 if (NONJUMP_INSN_P (p
)
6751 /* If we don't want spill regs ... */
6752 && (! (reload_reg_p
!= 0
6753 && reload_reg_p
!= (short *) (HOST_WIDE_INT
) 1)
6754 /* ... then ignore insns introduced by reload; they aren't
6755 useful and can cause results in reload_as_needed to be
6756 different from what they were when calculating the need for
6757 spills. If we notice an input-reload insn here, we will
6758 reject it below, but it might hide a usable equivalent.
6759 That makes bad code. It may even fail: perhaps no reg was
6760 spilled for this insn because it was assumed we would find
6762 || INSN_UID (p
) < reload_first_uid
))
6765 pat
= single_set (p
);
6767 /* First check for something that sets some reg equal to GOAL. */
6770 && true_regnum (SET_SRC (pat
)) == regno
6771 && (valueno
= true_regnum (valtry
= SET_DEST (pat
))) >= 0)
6774 && true_regnum (SET_DEST (pat
)) == regno
6775 && (valueno
= true_regnum (valtry
= SET_SRC (pat
))) >= 0)
6777 (goal_const
&& rtx_equal_p (SET_SRC (pat
), goal
)
6778 /* When looking for stack pointer + const,
6779 make sure we don't use a stack adjust. */
6780 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat
), goal
)
6781 && (valueno
= true_regnum (valtry
= SET_DEST (pat
))) >= 0)
6783 && (valueno
= true_regnum (valtry
= SET_DEST (pat
))) >= 0
6784 && rtx_renumbered_equal_p (goal
, SET_SRC (pat
)))
6786 && (valueno
= true_regnum (valtry
= SET_SRC (pat
))) >= 0
6787 && rtx_renumbered_equal_p (goal
, SET_DEST (pat
)))
6788 /* If we are looking for a constant,
6789 and something equivalent to that constant was copied
6790 into a reg, we can use that reg. */
6791 || (goal_const
&& REG_NOTES (p
) != 0
6792 && (tem
= find_reg_note (p
, REG_EQUIV
, NULL_RTX
))
6793 && ((rtx_equal_p (XEXP (tem
, 0), goal
)
6795 = true_regnum (valtry
= SET_DEST (pat
))) >= 0)
6796 || (REG_P (SET_DEST (pat
))
6797 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem
, 0))
6798 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem
, 0)))
6799 && CONST_INT_P (goal
)
6801 = operand_subword (XEXP (tem
, 0), 0, 0,
6803 && rtx_equal_p (goal
, goaltry
)
6805 = operand_subword (SET_DEST (pat
), 0, 0,
6807 && (valueno
= true_regnum (valtry
)) >= 0)))
6808 || (goal_const
&& (tem
= find_reg_note (p
, REG_EQUIV
,
6810 && REG_P (SET_DEST (pat
))
6811 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem
, 0))
6812 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem
, 0)))
6813 && CONST_INT_P (goal
)
6814 && 0 != (goaltry
= operand_subword (XEXP (tem
, 0), 1, 0,
6816 && rtx_equal_p (goal
, goaltry
)
6818 = operand_subword (SET_DEST (pat
), 1, 0, VOIDmode
))
6819 && (valueno
= true_regnum (valtry
)) >= 0)))
6823 if (valueno
!= other
)
6826 else if ((unsigned) valueno
>= FIRST_PSEUDO_REGISTER
)
6828 else if (!in_hard_reg_set_p (reg_class_contents
[(int) rclass
],
6838 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6839 (or copying VALUE into GOAL, if GOAL is also a register).
6840 Now verify that VALUE is really valid. */
6842 /* VALUENO is the register number of VALUE; a hard register. */
6844 /* Don't try to re-use something that is killed in this insn. We want
6845 to be able to trust REG_UNUSED notes. */
6846 if (REG_NOTES (where
) != 0 && find_reg_note (where
, REG_UNUSED
, value
))
6849 /* If we propose to get the value from the stack pointer or if GOAL is
6850 a MEM based on the stack pointer, we need a stable SP. */
6851 if (valueno
== STACK_POINTER_REGNUM
|| regno
== STACK_POINTER_REGNUM
6852 || (goal_mem
&& reg_overlap_mentioned_for_reload_p (stack_pointer_rtx
,
6856 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6857 if (GET_MODE (value
) != mode
)
6860 /* Reject VALUE if it was loaded from GOAL
6861 and is also a register that appears in the address of GOAL. */
6863 if (goal_mem
&& value
== SET_DEST (single_set (where
))
6864 && refers_to_regno_for_reload_p (valueno
, end_hard_regno (mode
, valueno
),
6868 /* Reject registers that overlap GOAL. */
6870 if (regno
>= 0 && regno
< FIRST_PSEUDO_REGISTER
)
6871 nregs
= hard_regno_nregs
[regno
][mode
];
6874 valuenregs
= hard_regno_nregs
[valueno
][mode
];
6876 if (!goal_mem
&& !goal_const
6877 && regno
+ nregs
> valueno
&& regno
< valueno
+ valuenregs
)
6880 /* Reject VALUE if it is one of the regs reserved for reloads.
6881 Reload1 knows how to reuse them anyway, and it would get
6882 confused if we allocated one without its knowledge.
6883 (Now that insns introduced by reload are ignored above,
6884 this case shouldn't happen, but I'm not positive.) */
6886 if (reload_reg_p
!= 0 && reload_reg_p
!= (short *) (HOST_WIDE_INT
) 1)
6889 for (i
= 0; i
< valuenregs
; ++i
)
6890 if (reload_reg_p
[valueno
+ i
] >= 0)
6894 /* Reject VALUE if it is a register being used for an input reload
6895 even if it is not one of those reserved. */
6897 if (reload_reg_p
!= 0)
6900 for (i
= 0; i
< n_reloads
; i
++)
6901 if (rld
[i
].reg_rtx
!= 0 && rld
[i
].in
)
6903 int regno1
= REGNO (rld
[i
].reg_rtx
);
6904 int nregs1
= hard_regno_nregs
[regno1
]
6905 [GET_MODE (rld
[i
].reg_rtx
)];
6906 if (regno1
< valueno
+ valuenregs
6907 && regno1
+ nregs1
> valueno
)
6913 /* We must treat frame pointer as varying here,
6914 since it can vary--in a nonlocal goto as generated by expand_goto. */
6915 goal_mem_addr_varies
= !CONSTANT_ADDRESS_P (XEXP (goal
, 0));
6917 /* Now verify that the values of GOAL and VALUE remain unaltered
6918 until INSN is reached. */
6927 /* Don't trust the conversion past a function call
6928 if either of the two is in a call-clobbered register, or memory. */
6933 if (goal_mem
|| need_stable_sp
)
6936 if (regno
>= 0 && regno
< FIRST_PSEUDO_REGISTER
)
6937 for (i
= 0; i
< nregs
; ++i
)
6938 if (call_used_regs
[regno
+ i
]
6939 || HARD_REGNO_CALL_PART_CLOBBERED (regno
+ i
, mode
))
6942 if (valueno
>= 0 && valueno
< FIRST_PSEUDO_REGISTER
)
6943 for (i
= 0; i
< valuenregs
; ++i
)
6944 if (call_used_regs
[valueno
+ i
]
6945 || HARD_REGNO_CALL_PART_CLOBBERED (valueno
+ i
, mode
))
6953 /* Watch out for unspec_volatile, and volatile asms. */
6954 if (volatile_insn_p (pat
))
6957 /* If this insn P stores in either GOAL or VALUE, return 0.
6958 If GOAL is a memory ref and this insn writes memory, return 0.
6959 If GOAL is a memory ref and its address is not constant,
6960 and this insn P changes a register used in GOAL, return 0. */
6962 if (GET_CODE (pat
) == COND_EXEC
)
6963 pat
= COND_EXEC_CODE (pat
);
6964 if (GET_CODE (pat
) == SET
|| GET_CODE (pat
) == CLOBBER
)
6966 rtx dest
= SET_DEST (pat
);
6967 while (GET_CODE (dest
) == SUBREG
6968 || GET_CODE (dest
) == ZERO_EXTRACT
6969 || GET_CODE (dest
) == STRICT_LOW_PART
)
6970 dest
= XEXP (dest
, 0);
6973 int xregno
= REGNO (dest
);
6975 if (REGNO (dest
) < FIRST_PSEUDO_REGISTER
)
6976 xnregs
= hard_regno_nregs
[xregno
][GET_MODE (dest
)];
6979 if (xregno
< regno
+ nregs
&& xregno
+ xnregs
> regno
)
6981 if (xregno
< valueno
+ valuenregs
6982 && xregno
+ xnregs
> valueno
)
6984 if (goal_mem_addr_varies
6985 && reg_overlap_mentioned_for_reload_p (dest
, goal
))
6987 if (xregno
== STACK_POINTER_REGNUM
&& need_stable_sp
)
6990 else if (goal_mem
&& MEM_P (dest
)
6991 && ! push_operand (dest
, GET_MODE (dest
)))
6993 else if (MEM_P (dest
) && regno
>= FIRST_PSEUDO_REGISTER
6994 && reg_equiv_memory_loc (regno
) != 0)
6996 else if (need_stable_sp
&& push_operand (dest
, GET_MODE (dest
)))
6999 else if (GET_CODE (pat
) == PARALLEL
)
7002 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; i
--)
7004 rtx v1
= XVECEXP (pat
, 0, i
);
7005 if (GET_CODE (v1
) == COND_EXEC
)
7006 v1
= COND_EXEC_CODE (v1
);
7007 if (GET_CODE (v1
) == SET
|| GET_CODE (v1
) == CLOBBER
)
7009 rtx dest
= SET_DEST (v1
);
7010 while (GET_CODE (dest
) == SUBREG
7011 || GET_CODE (dest
) == ZERO_EXTRACT
7012 || GET_CODE (dest
) == STRICT_LOW_PART
)
7013 dest
= XEXP (dest
, 0);
7016 int xregno
= REGNO (dest
);
7018 if (REGNO (dest
) < FIRST_PSEUDO_REGISTER
)
7019 xnregs
= hard_regno_nregs
[xregno
][GET_MODE (dest
)];
7022 if (xregno
< regno
+ nregs
7023 && xregno
+ xnregs
> regno
)
7025 if (xregno
< valueno
+ valuenregs
7026 && xregno
+ xnregs
> valueno
)
7028 if (goal_mem_addr_varies
7029 && reg_overlap_mentioned_for_reload_p (dest
,
7032 if (xregno
== STACK_POINTER_REGNUM
&& need_stable_sp
)
7035 else if (goal_mem
&& MEM_P (dest
)
7036 && ! push_operand (dest
, GET_MODE (dest
)))
7038 else if (MEM_P (dest
) && regno
>= FIRST_PSEUDO_REGISTER
7039 && reg_equiv_memory_loc (regno
) != 0)
7041 else if (need_stable_sp
7042 && push_operand (dest
, GET_MODE (dest
)))
7048 if (CALL_P (p
) && CALL_INSN_FUNCTION_USAGE (p
))
7052 for (link
= CALL_INSN_FUNCTION_USAGE (p
); XEXP (link
, 1) != 0;
7053 link
= XEXP (link
, 1))
7055 pat
= XEXP (link
, 0);
7056 if (GET_CODE (pat
) == CLOBBER
)
7058 rtx dest
= SET_DEST (pat
);
7062 int xregno
= REGNO (dest
);
7064 = hard_regno_nregs
[xregno
][GET_MODE (dest
)];
7066 if (xregno
< regno
+ nregs
7067 && xregno
+ xnregs
> regno
)
7069 else if (xregno
< valueno
+ valuenregs
7070 && xregno
+ xnregs
> valueno
)
7072 else if (goal_mem_addr_varies
7073 && reg_overlap_mentioned_for_reload_p (dest
,
7078 else if (goal_mem
&& MEM_P (dest
)
7079 && ! push_operand (dest
, GET_MODE (dest
)))
7081 else if (need_stable_sp
7082 && push_operand (dest
, GET_MODE (dest
)))
7089 /* If this insn auto-increments or auto-decrements
7090 either regno or valueno, return 0 now.
7091 If GOAL is a memory ref and its address is not constant,
7092 and this insn P increments a register used in GOAL, return 0. */
7096 for (link
= REG_NOTES (p
); link
; link
= XEXP (link
, 1))
7097 if (REG_NOTE_KIND (link
) == REG_INC
7098 && REG_P (XEXP (link
, 0)))
7100 int incno
= REGNO (XEXP (link
, 0));
7101 if (incno
< regno
+ nregs
&& incno
>= regno
)
7103 if (incno
< valueno
+ valuenregs
&& incno
>= valueno
)
7105 if (goal_mem_addr_varies
7106 && reg_overlap_mentioned_for_reload_p (XEXP (link
, 0),
7116 /* Find a place where INCED appears in an increment or decrement operator
7117 within X, and return the amount INCED is incremented or decremented by.
7118 The value is always positive. */
7121 find_inc_amount (rtx x
, rtx inced
)
7123 enum rtx_code code
= GET_CODE (x
);
7129 rtx addr
= XEXP (x
, 0);
7130 if ((GET_CODE (addr
) == PRE_DEC
7131 || GET_CODE (addr
) == POST_DEC
7132 || GET_CODE (addr
) == PRE_INC
7133 || GET_CODE (addr
) == POST_INC
)
7134 && XEXP (addr
, 0) == inced
)
7135 return GET_MODE_SIZE (GET_MODE (x
));
7136 else if ((GET_CODE (addr
) == PRE_MODIFY
7137 || GET_CODE (addr
) == POST_MODIFY
)
7138 && GET_CODE (XEXP (addr
, 1)) == PLUS
7139 && XEXP (addr
, 0) == XEXP (XEXP (addr
, 1), 0)
7140 && XEXP (addr
, 0) == inced
7141 && CONST_INT_P (XEXP (XEXP (addr
, 1), 1)))
7143 i
= INTVAL (XEXP (XEXP (addr
, 1), 1));
7144 return i
< 0 ? -i
: i
;
7148 fmt
= GET_RTX_FORMAT (code
);
7149 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7153 int tem
= find_inc_amount (XEXP (x
, i
), inced
);
7160 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7162 int tem
= find_inc_amount (XVECEXP (x
, i
, j
), inced
);
7172 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7173 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7176 reg_inc_found_and_valid_p (unsigned int regno
, unsigned int endregno
,
7186 if (! INSN_P (insn
))
7189 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
7190 if (REG_NOTE_KIND (link
) == REG_INC
)
7192 unsigned int test
= (int) REGNO (XEXP (link
, 0));
7193 if (test
>= regno
&& test
< endregno
)
7199 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7200 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7201 REG_INC. REGNO must refer to a hard register. */
7204 regno_clobbered_p (unsigned int regno
, rtx_insn
*insn
, machine_mode mode
,
7207 unsigned int nregs
, endregno
;
7209 /* regno must be a hard register. */
7210 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
);
7212 nregs
= hard_regno_nregs
[regno
][mode
];
7213 endregno
= regno
+ nregs
;
7215 if ((GET_CODE (PATTERN (insn
)) == CLOBBER
7216 || (sets
== 1 && GET_CODE (PATTERN (insn
)) == SET
))
7217 && REG_P (XEXP (PATTERN (insn
), 0)))
7219 unsigned int test
= REGNO (XEXP (PATTERN (insn
), 0));
7221 return test
>= regno
&& test
< endregno
;
7224 if (sets
== 2 && reg_inc_found_and_valid_p (regno
, endregno
, insn
))
7227 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
7229 int i
= XVECLEN (PATTERN (insn
), 0) - 1;
7233 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
7234 if ((GET_CODE (elt
) == CLOBBER
7235 || (sets
== 1 && GET_CODE (elt
) == SET
))
7236 && REG_P (XEXP (elt
, 0)))
7238 unsigned int test
= REGNO (XEXP (elt
, 0));
7240 if (test
>= regno
&& test
< endregno
)
7244 && reg_inc_found_and_valid_p (regno
, endregno
, elt
))
7252 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7254 reload_adjust_reg_for_mode (rtx reloadreg
, machine_mode mode
)
7258 if (GET_MODE (reloadreg
) == mode
)
7261 regno
= REGNO (reloadreg
);
7263 if (REG_WORDS_BIG_ENDIAN
)
7264 regno
+= (int) hard_regno_nregs
[regno
][GET_MODE (reloadreg
)]
7265 - (int) hard_regno_nregs
[regno
][mode
];
7267 return gen_rtx_REG (mode
, regno
);
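/* Illustrative example (assuming a REG_WORDS_BIG_ENDIAN target with 32-bit
   words where (reg:DI 4) occupies hard registers 4 and 5):
   reload_adjust_reg_for_mode ((reg:DI 4), SImode) returns (reg:SI 5),
   because the low SImode part lives in the higher-numbered register there;
   on a little-endian-words target it would return (reg:SI 4).  */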
7270 static const char *const reload_when_needed_name
[] =
7273 "RELOAD_FOR_OUTPUT",
7275 "RELOAD_FOR_INPUT_ADDRESS",
7276 "RELOAD_FOR_INPADDR_ADDRESS",
7277 "RELOAD_FOR_OUTPUT_ADDRESS",
7278 "RELOAD_FOR_OUTADDR_ADDRESS",
7279 "RELOAD_FOR_OPERAND_ADDRESS",
7280 "RELOAD_FOR_OPADDR_ADDR",
7282 "RELOAD_FOR_OTHER_ADDRESS"
7285 /* These functions are used to print the variables set by 'find_reloads' */
7288 debug_reload_to_stream (FILE *f
)
7295 for (r
= 0; r
< n_reloads
; r
++)
7297 fprintf (f
, "Reload %d: ", r
);
7301 fprintf (f
, "reload_in (%s) = ",
7302 GET_MODE_NAME (rld
[r
].inmode
));
7303 print_inline_rtx (f
, rld
[r
].in
, 24);
7304 fprintf (f
, "\n\t");
7307 if (rld
[r
].out
!= 0)
7309 fprintf (f
, "reload_out (%s) = ",
7310 GET_MODE_NAME (rld
[r
].outmode
));
7311 print_inline_rtx (f
, rld
[r
].out
, 24);
7312 fprintf (f
, "\n\t");
7315 fprintf (f
, "%s, ", reg_class_names
[(int) rld
[r
].rclass
]);
7317 fprintf (f
, "%s (opnum = %d)",
7318 reload_when_needed_name
[(int) rld
[r
].when_needed
],
7321 if (rld
[r
].optional
)
7322 fprintf (f
, ", optional");
7324 if (rld
[r
].nongroup
)
7325 fprintf (f
, ", nongroup");
7327 if (rld
[r
].inc
!= 0)
7328 fprintf (f
, ", inc by %d", rld
[r
].inc
);
7330 if (rld
[r
].nocombine
)
7331 fprintf (f
, ", can't combine");
7333 if (rld
[r
].secondary_p
)
7334 fprintf (f
, ", secondary_reload_p");
7336 if (rld
[r
].in_reg
!= 0)
7338 fprintf (f
, "\n\treload_in_reg: ");
7339 print_inline_rtx (f
, rld
[r
].in_reg
, 24);
7342 if (rld
[r
].out_reg
!= 0)
7344 fprintf (f
, "\n\treload_out_reg: ");
7345 print_inline_rtx (f
, rld
[r
].out_reg
, 24);
7348 if (rld
[r
].reg_rtx
!= 0)
7350 fprintf (f
, "\n\treload_reg_rtx: ");
7351 print_inline_rtx (f
, rld
[r
].reg_rtx
, 24);
7355 if (rld
[r
].secondary_in_reload
!= -1)
7357 fprintf (f
, "%ssecondary_in_reload = %d",
7358 prefix
, rld
[r
].secondary_in_reload
);
7362 if (rld
[r
].secondary_out_reload
!= -1)
7363 fprintf (f
, "%ssecondary_out_reload = %d\n",
7364 prefix
, rld
[r
].secondary_out_reload
);
7367 if (rld
[r
].secondary_in_icode
!= CODE_FOR_nothing
)
7369 fprintf (f
, "%ssecondary_in_icode = %s", prefix
,
7370 insn_data
[rld
[r
].secondary_in_icode
].name
);
7374 if (rld
[r
].secondary_out_icode
!= CODE_FOR_nothing
)
7375 fprintf (f
, "%ssecondary_out_icode = %s", prefix
,
7376 insn_data
[rld
[r
].secondary_out_icode
].name
);
7385 debug_reload_to_stream (stderr
);
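/* Usage note, not part of the original sources: these dump routines are
   meant to be called by hand from a debugger, e.g.

	(gdb) call debug_reload ()

   after find_reloads has filled in rld[], to print every pending reload
   to stderr via debug_reload_to_stream.  */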