/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains subroutines used only from the file reload1.c.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally record the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.  (A sketch of this calling sequence
   follows this comment.)

   NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better this way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

   1 happens every time find_reloads is called.
   2 happens only when REPLACE is 1, which is only when
   actually doing the reloads, not when just counting them.

   Using a reload register for several reloads in one insn:

   When an insn has reloads, it is considered as having three parts:
   the input reloads, the insn itself after reloading, and the output reloads.
   Reloads of values used in memory addresses are often needed for only one part.

   When this is so, reload_when_needed records which part needs the reload.
   Two reloads for different parts of the insn can share the same reload
   register.

   When a reload is used for addresses in multiple parts, or when it is
   an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
   a register with any other reload.  */
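/* Illustrative only: a hypothetical sketch of the calling protocol described
   above, not code taken from reload1.c.  IND_LEVELS, LIVE_KNOWN,
   SPILL_REG_ORDER and chosen_regno are stand-ins for whatever the real
   caller uses.  */
#if 0
static void
process_one_insn_sketch (rtx insn, int ind_levels, int live_known,
			 short *spill_reg_order)
{
  int r;

  /* Record the reloads this insn needs and where the reloaded values
     appear (second argument nonzero so locations are recorded).  */
  find_reloads (insn, 1, ind_levels, live_known, spill_reg_order);

  /* Pick a hard register for every reload that find_reloads did not
     already resolve, then emit load insns before INSN (and store insns
     after it) that move values through those registers.  */
  for (r = 0; r < n_reloads; r++)
    if (rld[r].reg_rtx == 0)
      rld[r].reg_rtx = gen_rtx_REG (rld[r].inmode, chosen_regno (r));

  /* Finally rewrite INSN to refer to the chosen reload registers.  */
  subst_reloads (insn);
}
#endif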
/* We do not enable this with ENABLE_CHECKING, since it is awfully slow.  */

#include "coretypes.h"
#include "rtl-error.h"
#include "insn-config.h"
#include "addresses.h"
#include "hard-reg-set.h"
#include "function.h"
#include "toplev.h" /* exact_log2 may be used by targets */
/* True if X is a constant that can be forced into the constant pool.  */
#define CONST_POOL_OK_P(X)			\
  (CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (X))

/* True if RCLASS is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static inline bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size [(int) rclass] == 1
	  || (reg_class_size [(int) rclass] >= 1
	      && targetm.class_likely_spilled_p (rclass)));
}
/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;
/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */
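/* Illustrative only: a simplified sketch of how such a table is consumed
   once reload registers are known; the real subst_reloads also converts
   the register to the recorded mode and handles the SUBREG case noted in
   SUBREG_LOC.  */
#if 0
static void
apply_replacements_sketch (void)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;

      /* Store the chosen reload register into the recorded location.  */
      *r->where = gen_rtx_REG (r->mode, REGNO (reloadreg));
    }
}
#endif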
/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in */
  rtx *subreg_loc;		/* Location of SUBREG if WHERE is inside
				   a SUBREG; 0 otherwise.  */
  int what;			/* which reload this is for */
  enum machine_mode mode;	/* mode it must have */
};

static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;
/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};
#ifdef SECONDARY_MEMORY_NEEDED

/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;
#endif
/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from the reload-number for the input
   operand.  */
static int output_reloadnum;
/* Compare two RTX's.  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)				\
			? REG_P (y) && REGNO (x) == REGNO (y)	\
			: rtx_equal_p (x, y) && ! side_effects_p (x))))

/* Indicates if two reload purposes are for similar enough things that we
   can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER		\
   || ((when1) == (when2) && (op1) == (op2))			\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS			\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)		\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS			\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
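/* For example (illustrative only): two RELOAD_FOR_INPUT reloads merge even
   when they are for different operands, and the merged reload keeps its
   RELOAD_FOR_INPUT type, because MERGABLE_RELOADS accepts the pair while
   MERGE_TO_OTHER rejects promoting it:

     MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)  --> true
     MERGE_TO_OTHER   (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)  --> false

   Two RELOAD_FOR_INPUT_ADDRESS reloads for different operands, by contrast,
   are not merged at all, since neither the same-operand clause nor any of
   the always-mergeable cases applies.  */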
/* If we are going to reload an address, compute the reload type to
   use.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
static int push_secondary_reload (int, rtx, int, int, enum reg_class,
				  enum machine_mode, enum reload_type,
				  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
					int, unsigned int);
static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
static void push_replacement (rtx *, int, enum machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
				 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
			      enum machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
				int *);
static rtx make_memloc (rtx, int);
static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
					      addr_space_t, rtx *);
static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
				 int, enum reload_type, int, rtx);
static rtx subst_reg_equivs (rtx, rtx);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx, int, int);
static int find_reloads_address_1 (enum machine_mode, rtx, int,
				   enum rtx_code, enum rtx_code, rtx *,
				   int, enum reload_type, int, rtx);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
				       enum machine_mode, int,
				       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
					int, rtx);
static void copy_replacements_1 (rtx *, rtx *, int);
static int find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
					 rtx, rtx *);
/* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list[regno]; it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list[regno]
    = alloc_EXPR_LIST (REG_EQUIV, mem,
		       reg_equiv_alt_mem_list[regno]);
}
/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       enum machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  enum machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  char letter;
  secondary_reload_info sri;

  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (GET_CODE (x) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (x))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem[REGNO (x)] != 0)
    x = reg_equiv_mem[REGNO (x)];

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      letter = *scratch_constraint;
      scratch_class = (letter == 'r' ? GENERAL_REGS
		       : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
						    scratch_constraint));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     can not use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  if (s_reload == n_reloads)
    {
#ifdef SECONDARY_MEMORY_NEEDED
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}
#endif

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (! in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
	get_secondary_mem (x, mode, opnum, type);
#endif
    }

  *picode = icode;

  return s_reload;
}
/* If a secondary reload is needed, return its class.  If both an intermediate
   register and a scratch register are needed, we return the class of the
   intermediate register.  */
reg_class_t
secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
			rtx x)
{
  enum insn_code icode;
  secondary_reload_info sri;

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = NULL;
  rclass
    = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If there are no secondary reloads at all, we return NO_REGS.
     If an intermediate register is needed, we return its class.  */
  if (icode == CODE_FOR_nothing || rclass != NO_REGS)
    return rclass;

  /* No intermediate register is needed, but we have a special reload
     pattern, which we assume for now needs a scratch register.  */
  return scratch_reload_class (icode);
}
/* ICODE is the insn_code of a reload pattern.  Check that it has exactly
   three operands, verify that operand 2 is an output operand, and return
   its register class.
   ??? We'd like to be able to handle any pattern with at least 2 operands,
   for zero or more scratch registers, but that needs more infrastructure.  */
reg_class_t
scratch_reload_class (enum insn_code icode)
{
  const char *scratch_constraint;
  char scratch_letter;
  enum reg_class rclass;

  gcc_assert (insn_data[(int) icode].n_operands == 3);
  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
  gcc_assert (*scratch_constraint == '=');
  scratch_constraint++;
  if (*scratch_constraint == '&')
    scratch_constraint++;
  scratch_letter = *scratch_constraint;
  if (scratch_letter == 'r')
    return GENERAL_REGS;
  rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
				      scratch_constraint);
  gcc_assert (rclass != NO_REGS);
  return rclass;
}
#ifdef SECONDARY_MEMORY_NEEDED

/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short load and stores from all registers
     (e.g., FP registers).  */

#ifdef SECONDARY_MEMORY_NEEDED_MODE
  mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
#else
  if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
    mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
#endif

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int) mode)
    secondary_memlocs_elim_used = (int) mode + 1;
  return loc;
}

/* Clear any secondary memory locations we've made.  */

void
clear_secondary_mem (void)
{
  memset (secondary_memlocs, 0, sizeof secondary_memlocs);
}
#endif /* SECONDARY_MEMORY_NEEDED */
/* Find the largest class which has at least one register valid in
   mode INNER, and which for every such register, that register number
   plus N is also valid in OUTER (if in range) and is cheap to move
   into REGNO.  Such a class must exist.  */

static enum reg_class
find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
		  enum machine_mode inner ATTRIBUTE_UNUSED, int n,
		  unsigned int dest_regno ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      int bad = 0;
      int good = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
	  {
	    if (HARD_REGNO_MODE_OK (regno, inner))
	      {
		good = 1;
		if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
		    || ! HARD_REGNO_MODE_OK (regno + n, outer))
		  bad = 1;
	      }
	  }

      if (bad || !good)
	continue;
      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((reg_class_size[rclass] > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = reg_class_size[rclass];
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

  return best_class;
}
/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;

  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}
/* Return nonzero if X is a SUBREG which will require reloading of its
   SUBREG_REG expression.  */

static int
reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
{
  rtx inner;

  /* Only SUBREGs are problematical.  */
  if (GET_CODE (x) != SUBREG)
    return 0;

  inner = SUBREG_REG (x);

  /* If INNER is a constant or PLUS, then INNER must be reloaded.  */
  if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
    return 1;

  /* If INNER is not a hard register, then INNER will not need to
     be reloaded.  */
  if (!REG_P (inner)
      || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
    return 0;

  /* If INNER is not ok for MODE, then INNER will need reloading.  */
  if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
    return 1;

  /* If the outer part is a word or smaller, INNER larger than a
     word and the number of regs for INNER is not the same as the
     number of words in INNER, then INNER will need reloading.  */
  return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
	  && output
	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
	      != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
}
/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
   requiring an extra reload register.  The caller has already found that
   IN contains some reference to REGNO, so check that we can produce the
   new value in a single step.  E.g. if we have
   (set (reg r13) (plus (reg r13) (const int 1))), and there is an
   instruction that adds one to a register, this should succeed.
   However, if we have something like
   (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
   needs to be loaded into a register first, we need a separate reload
   register.
   Such PLUS reloads are generated by find_reload_address_part.
   The out-of-range PLUS expressions are usually introduced in the instruction
   patterns by register elimination and substituting pseudos without a home
   by their function-invariant equivalences.  */

static int
can_reload_into (rtx in, int regno, enum machine_mode mode)
{
  rtx dst, test_insn;
  int r = 0;
  struct recog_data save_recog_data;

  /* For matching constraints, we often get notional input reloads where
     we want to use the original register as the reload register.  I.e.
     technically this is a non-optional input-output reload, but IN is
     already a valid register, and has been chosen as the reload register.
     Speed this up, since it trivially works.  */
  if (REG_P (in))
    return 1;

  /* To test MEMs properly, we'd have to take into account all the reloads
     that are already scheduled, which can become quite complicated.
     And since we've already handled address reloads for this MEM, it
     should always succeed anyway.  */
  if (MEM_P (in))
    return 1;

  /* If we can make a simple SET insn that does the job, everything should
     be fine.  */
  dst = gen_rtx_REG (mode, regno);
  test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
  save_recog_data = recog_data;
  if (recog_memoized (test_insn) >= 0)
    {
      extract_insn (test_insn);
      r = constrain_operands (1);
    }
  recog_data = save_recog_data;
  return r;
}
/* Record one reload that needs to be performed.
   IN is an rtx saying where the data are to be found before this instruction.
   OUT says where they must be stored after the instruction.
   (IN is zero for data not read, and OUT is zero for data not written.)
   INLOC and OUTLOC point to the places in the instructions where
   IN and OUT were found.
   If IN and OUT are both nonzero, it means the same register must be used
   to reload both IN and OUT.

   RCLASS is a register class required for the reloaded data.
   INMODE is the machine mode that the instruction requires
   for the reg that replaces IN and OUTMODE is likewise for OUT.

   If IN is zero, then OUT's location and mode should be passed as
   INLOC and INMODE.

   STRICT_LOW is nonzero if there is a containing STRICT_LOW_PART rtx.

   OPTIONAL nonzero means this reload does not need to be performed:
   it can be discarded if that is more convenient.

   OPNUM and TYPE say what the purpose of this reload is.

   The return value is the reload-number for this reload.

   If both IN and OUT are nonzero, in some rare cases we might
   want to make two separate reloads.  (Actually we never do this now.)
   Therefore, the reload-number for OUT is stored in
   output_reloadnum when we return; the return value applies to IN.
   Usually (presently always), when IN and OUT are nonzero,
   the two reload-numbers are equal, but the caller should be careful to
   distinguish them.  */

int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, enum machine_mode inmode,
	     enum machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  int dont_share = 0;
  int dont_remove_subreg = 0;
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);
  /* If find_reloads and friends until now failed to replace a pseudo
     with a constant of reg_equiv_constant, something went wrong
     beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant[regno] == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant[regno] == NULL_RTX);
    }
  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }
  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
     either M1 is not valid for R or M2 is wider than a word but we only
     need one word to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */
1016 if (in
!= 0 && GET_CODE (in
) == SUBREG
1017 && (subreg_lowpart_p (in
) || strict_low
)
1018 #ifdef CANNOT_CHANGE_MODE_CLASS
1019 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in
)), inmode
, rclass
)
1021 && (CONSTANT_P (SUBREG_REG (in
))
1022 || GET_CODE (SUBREG_REG (in
)) == PLUS
1024 || (((REG_P (SUBREG_REG (in
))
1025 && REGNO (SUBREG_REG (in
)) >= FIRST_PSEUDO_REGISTER
)
1026 || MEM_P (SUBREG_REG (in
)))
1027 && ((GET_MODE_SIZE (inmode
)
1028 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))))
1029 #ifdef LOAD_EXTEND_OP
1030 || (GET_MODE_SIZE (inmode
) <= UNITS_PER_WORD
1031 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1033 && (GET_MODE_SIZE (inmode
)
1034 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))))
1035 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in
)))
1036 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in
))) != UNKNOWN
)
1038 #ifdef WORD_REGISTER_OPERATIONS
1039 || ((GET_MODE_SIZE (inmode
)
1040 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))))
1041 && ((GET_MODE_SIZE (inmode
) - 1) / UNITS_PER_WORD
==
1042 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))) - 1)
1046 || (REG_P (SUBREG_REG (in
))
1047 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
1048 /* The case where out is nonzero
1049 is handled differently in the following statement. */
1050 && (out
== 0 || subreg_lowpart_p (in
))
1051 && ((GET_MODE_SIZE (inmode
) <= UNITS_PER_WORD
1052 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1054 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1056 != (int) hard_regno_nregs
[REGNO (SUBREG_REG (in
))]
1057 [GET_MODE (SUBREG_REG (in
))]))
1058 || ! HARD_REGNO_MODE_OK (subreg_regno (in
), inmode
)))
1059 || (secondary_reload_class (1, rclass
, inmode
, in
) != NO_REGS
1060 && (secondary_reload_class (1, rclass
, GET_MODE (SUBREG_REG (in
)),
1063 #ifdef CANNOT_CHANGE_MODE_CLASS
1064 || (REG_P (SUBREG_REG (in
))
1065 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
1066 && REG_CANNOT_CHANGE_MODE_P
1067 (REGNO (SUBREG_REG (in
)), GET_MODE (SUBREG_REG (in
)), inmode
))
1071 in_subreg_loc
= inloc
;
1072 inloc
= &SUBREG_REG (in
);
1074 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1076 /* This is supposed to happen only for paradoxical subregs made by
1077 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1078 gcc_assert (GET_MODE_SIZE (GET_MODE (in
)) <= GET_MODE_SIZE (inmode
));
1080 inmode
= GET_MODE (in
);
1083 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1084 either M1 is not valid for R or M2 is wider than a word but we only
1085 need one word to store an M2-sized quantity in R.
1087 However, we must reload the inner reg *as well as* the subreg in
1090 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1091 code above. This can happen if SUBREG_BYTE != 0. */
1093 if (in
!= 0 && reload_inner_reg_of_subreg (in
, inmode
, 0))
1095 enum reg_class in_class
= rclass
;
1097 if (REG_P (SUBREG_REG (in
)))
1099 = find_valid_class (inmode
, GET_MODE (SUBREG_REG (in
)),
1100 subreg_regno_offset (REGNO (SUBREG_REG (in
)),
1101 GET_MODE (SUBREG_REG (in
)),
1104 REGNO (SUBREG_REG (in
)));
1106 /* This relies on the fact that emit_reload_insns outputs the
1107 instructions for input reloads of type RELOAD_OTHER in the same
1108 order as the reloads. Thus if the outer reload is also of type
1109 RELOAD_OTHER, we are guaranteed that this inner reload will be
1110 output before the outer reload. */
1111 push_reload (SUBREG_REG (in
), NULL_RTX
, &SUBREG_REG (in
), (rtx
*) 0,
1112 in_class
, VOIDmode
, VOIDmode
, 0, 0, opnum
, type
);
1113 dont_remove_subreg
= 1;
1116 /* Similarly for paradoxical and problematical SUBREGs on the output.
1117 Note that there is no reason we need worry about the previous value
1118 of SUBREG_REG (out); even if wider than out,
1119 storing in a subreg is entitled to clobber it all
1120 (except in the case of STRICT_LOW_PART,
1121 and in that case the constraint should label it input-output.) */
1122 if (out
!= 0 && GET_CODE (out
) == SUBREG
1123 && (subreg_lowpart_p (out
) || strict_low
)
1124 #ifdef CANNOT_CHANGE_MODE_CLASS
1125 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out
)), outmode
, rclass
)
1127 && (CONSTANT_P (SUBREG_REG (out
))
1129 || (((REG_P (SUBREG_REG (out
))
1130 && REGNO (SUBREG_REG (out
)) >= FIRST_PSEUDO_REGISTER
)
1131 || MEM_P (SUBREG_REG (out
)))
1132 && ((GET_MODE_SIZE (outmode
)
1133 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
))))
1134 #ifdef WORD_REGISTER_OPERATIONS
1135 || ((GET_MODE_SIZE (outmode
)
1136 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
))))
1137 && ((GET_MODE_SIZE (outmode
) - 1) / UNITS_PER_WORD
==
1138 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
))) - 1)
1142 || (REG_P (SUBREG_REG (out
))
1143 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
1144 && ((GET_MODE_SIZE (outmode
) <= UNITS_PER_WORD
1145 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
)))
1147 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
)))
1149 != (int) hard_regno_nregs
[REGNO (SUBREG_REG (out
))]
1150 [GET_MODE (SUBREG_REG (out
))]))
1151 || ! HARD_REGNO_MODE_OK (subreg_regno (out
), outmode
)))
1152 || (secondary_reload_class (0, rclass
, outmode
, out
) != NO_REGS
1153 && (secondary_reload_class (0, rclass
, GET_MODE (SUBREG_REG (out
)),
1156 #ifdef CANNOT_CHANGE_MODE_CLASS
1157 || (REG_P (SUBREG_REG (out
))
1158 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
1159 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out
)),
1160 GET_MODE (SUBREG_REG (out
)),
1165 out_subreg_loc
= outloc
;
1166 outloc
= &SUBREG_REG (out
);
1168 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1169 gcc_assert (!MEM_P (out
)
1170 || GET_MODE_SIZE (GET_MODE (out
))
1171 <= GET_MODE_SIZE (outmode
));
1173 outmode
= GET_MODE (out
);
1176 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1177 either M1 is not valid for R or M2 is wider than a word but we only
1178 need one word to store an M2-sized quantity in R.
1180 However, we must reload the inner reg *as well as* the subreg in
1181 that case. In this case, the inner reg is an in-out reload. */
1183 if (out
!= 0 && reload_inner_reg_of_subreg (out
, outmode
, 1))
1185 /* This relies on the fact that emit_reload_insns outputs the
1186 instructions for output reloads of type RELOAD_OTHER in reverse
1187 order of the reloads. Thus if the outer reload is also of type
1188 RELOAD_OTHER, we are guaranteed that this inner reload will be
1189 output after the outer reload. */
1190 dont_remove_subreg
= 1;
1191 push_reload (SUBREG_REG (out
), SUBREG_REG (out
), &SUBREG_REG (out
),
1193 find_valid_class (outmode
, GET_MODE (SUBREG_REG (out
)),
1194 subreg_regno_offset (REGNO (SUBREG_REG (out
)),
1195 GET_MODE (SUBREG_REG (out
)),
1198 REGNO (SUBREG_REG (out
))),
1199 VOIDmode
, VOIDmode
, 0, 0,
1200 opnum
, RELOAD_OTHER
);
1203 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1204 if (in
!= 0 && out
!= 0 && MEM_P (out
)
1205 && (REG_P (in
) || MEM_P (in
) || GET_CODE (in
) == PLUS
)
1206 && reg_overlap_mentioned_for_reload_p (in
, XEXP (out
, 0)))
1209 /* If IN is a SUBREG of a hard register, make a new REG. This
1210 simplifies some of the cases below. */
1212 if (in
!= 0 && GET_CODE (in
) == SUBREG
&& REG_P (SUBREG_REG (in
))
1213 && REGNO (SUBREG_REG (in
)) < FIRST_PSEUDO_REGISTER
1214 && ! dont_remove_subreg
)
1215 in
= gen_rtx_REG (GET_MODE (in
), subreg_regno (in
));
1217 /* Similarly for OUT. */
1218 if (out
!= 0 && GET_CODE (out
) == SUBREG
1219 && REG_P (SUBREG_REG (out
))
1220 && REGNO (SUBREG_REG (out
)) < FIRST_PSEUDO_REGISTER
1221 && ! dont_remove_subreg
)
1222 out
= gen_rtx_REG (GET_MODE (out
), subreg_regno (out
));
1224 /* Narrow down the class of register wanted if that is
1225 desirable on this machine for efficiency. */
1227 reg_class_t preferred_class
= rclass
;
1230 preferred_class
= targetm
.preferred_reload_class (in
, rclass
);
1232 /* Output reloads may need analogous treatment, different in detail. */
1235 = targetm
.preferred_output_reload_class (out
, preferred_class
);
1237 /* Discard what the target said if we cannot do it. */
1238 if (preferred_class
!= NO_REGS
1239 || (optional
&& type
== RELOAD_FOR_OUTPUT
))
1240 rclass
= (enum reg_class
) preferred_class
;
1243 /* Make sure we use a class that can handle the actual pseudo
1244 inside any subreg. For example, on the 386, QImode regs
1245 can appear within SImode subregs. Although GENERAL_REGS
1246 can handle SImode, QImode needs a smaller class. */
1247 #ifdef LIMIT_RELOAD_CLASS
1249 rclass
= LIMIT_RELOAD_CLASS (inmode
, rclass
);
1250 else if (in
!= 0 && GET_CODE (in
) == SUBREG
)
1251 rclass
= LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in
)), rclass
);
1254 rclass
= LIMIT_RELOAD_CLASS (outmode
, rclass
);
1255 if (out
!= 0 && GET_CODE (out
) == SUBREG
)
1256 rclass
= LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out
)), rclass
);
1259 /* Verify that this class is at least possible for the mode that
1261 if (this_insn_is_asm
)
1263 enum machine_mode mode
;
1264 if (GET_MODE_SIZE (inmode
) > GET_MODE_SIZE (outmode
))
1268 if (mode
== VOIDmode
)
1270 error_for_asm (this_insn
, "cannot reload integer constant "
1271 "operand in %<asm%>");
1276 outmode
= word_mode
;
1278 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1279 if (HARD_REGNO_MODE_OK (i
, mode
)
1280 && in_hard_reg_set_p (reg_class_contents
[(int) rclass
], mode
, i
))
1282 if (i
== FIRST_PSEUDO_REGISTER
)
1284 error_for_asm (this_insn
, "impossible register constraint "
1286 /* Avoid further trouble with this insn. */
1287 PATTERN (this_insn
) = gen_rtx_USE (VOIDmode
, const0_rtx
);
1288 /* We used to continue here setting class to ALL_REGS, but it triggers
1289 sanity check on i386 for:
1290 void foo(long double d)
1294 Returning zero here ought to be safe as we take care in
1295 find_reloads to not process the reloads when instruction was
1302 /* Optional output reloads are always OK even if we have no register class,
1303 since the function of these reloads is only to have spill_reg_store etc.
1304 set, so that the storing insn can be deleted later. */
1305 gcc_assert (rclass
!= NO_REGS
1306 || (optional
!= 0 && type
== RELOAD_FOR_OUTPUT
));
1308 i
= find_reusable_reload (&in
, out
, rclass
, type
, opnum
, dont_share
);
1312 /* See if we need a secondary reload register to move between CLASS
1313 and IN or CLASS and OUT. Get the icode and push any required reloads
1314 needed for each of them if so. */
1318 = push_secondary_reload (1, in
, opnum
, optional
, rclass
, inmode
, type
,
1319 &secondary_in_icode
, NULL
);
1320 if (out
!= 0 && GET_CODE (out
) != SCRATCH
)
1321 secondary_out_reload
1322 = push_secondary_reload (0, out
, opnum
, optional
, rclass
, outmode
,
1323 type
, &secondary_out_icode
, NULL
);
1325 /* We found no existing reload suitable for re-use.
1326 So add an additional reload. */
1328 #ifdef SECONDARY_MEMORY_NEEDED
1329 /* If a memory location is needed for the copy, make one. */
1332 || (GET_CODE (in
) == SUBREG
&& REG_P (SUBREG_REG (in
))))
1333 && reg_or_subregno (in
) < FIRST_PSEUDO_REGISTER
1334 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in
)),
1336 get_secondary_mem (in
, inmode
, opnum
, type
);
1342 rld
[i
].rclass
= rclass
;
1343 rld
[i
].inmode
= inmode
;
1344 rld
[i
].outmode
= outmode
;
1346 rld
[i
].optional
= optional
;
1348 rld
[i
].nocombine
= 0;
1349 rld
[i
].in_reg
= inloc
? *inloc
: 0;
1350 rld
[i
].out_reg
= outloc
? *outloc
: 0;
1351 rld
[i
].opnum
= opnum
;
1352 rld
[i
].when_needed
= type
;
1353 rld
[i
].secondary_in_reload
= secondary_in_reload
;
1354 rld
[i
].secondary_out_reload
= secondary_out_reload
;
1355 rld
[i
].secondary_in_icode
= secondary_in_icode
;
1356 rld
[i
].secondary_out_icode
= secondary_out_icode
;
1357 rld
[i
].secondary_p
= 0;
1361 #ifdef SECONDARY_MEMORY_NEEDED
1364 || (GET_CODE (out
) == SUBREG
&& REG_P (SUBREG_REG (out
))))
1365 && reg_or_subregno (out
) < FIRST_PSEUDO_REGISTER
1366 && SECONDARY_MEMORY_NEEDED (rclass
,
1367 REGNO_REG_CLASS (reg_or_subregno (out
)),
1369 get_secondary_mem (out
, outmode
, opnum
, type
);
1374 /* We are reusing an existing reload,
1375 but we may have additional information for it.
1376 For example, we may now have both IN and OUT
1377 while the old one may have just one of them. */
1379 /* The modes can be different. If they are, we want to reload in
1380 the larger mode, so that the value is valid for both modes. */
1381 if (inmode
!= VOIDmode
1382 && GET_MODE_SIZE (inmode
) > GET_MODE_SIZE (rld
[i
].inmode
))
1383 rld
[i
].inmode
= inmode
;
1384 if (outmode
!= VOIDmode
1385 && GET_MODE_SIZE (outmode
) > GET_MODE_SIZE (rld
[i
].outmode
))
1386 rld
[i
].outmode
= outmode
;
1389 rtx in_reg
= inloc
? *inloc
: 0;
1390 /* If we merge reloads for two distinct rtl expressions that
1391 are identical in content, there might be duplicate address
1392 reloads. Remove the extra set now, so that if we later find
1393 that we can inherit this reload, we can get rid of the
1394 address reloads altogether.
1396 Do not do this if both reloads are optional since the result
1397 would be an optional reload which could potentially leave
1398 unresolved address replacements.
1400 It is not sufficient to call transfer_replacements since
1401 choose_reload_regs will remove the replacements for address
1402 reloads of inherited reloads which results in the same
1404 if (rld
[i
].in
!= in
&& rtx_equal_p (in
, rld
[i
].in
)
1405 && ! (rld
[i
].optional
&& optional
))
1407 /* We must keep the address reload with the lower operand
1409 if (opnum
> rld
[i
].opnum
)
1411 remove_address_replacements (in
);
1413 in_reg
= rld
[i
].in_reg
;
1416 remove_address_replacements (rld
[i
].in
);
1418 /* When emitting reloads we don't necessarily look at the in-
1419 and outmode, but also directly at the operands (in and out).
1420 So we can't simply overwrite them with whatever we have found
1421 for this (to-be-merged) reload, we have to "merge" that too.
1422 Reusing another reload already verified that we deal with the
1423 same operands, just possibly in different modes. So we
1424 overwrite the operands only when the new mode is larger.
1425 See also PR33613. */
1427 || GET_MODE_SIZE (GET_MODE (in
))
1428 > GET_MODE_SIZE (GET_MODE (rld
[i
].in
)))
1432 && GET_MODE_SIZE (GET_MODE (in_reg
))
1433 > GET_MODE_SIZE (GET_MODE (rld
[i
].in_reg
))))
1434 rld
[i
].in_reg
= in_reg
;
1440 && GET_MODE_SIZE (GET_MODE (out
))
1441 > GET_MODE_SIZE (GET_MODE (rld
[i
].out
))))
1445 || GET_MODE_SIZE (GET_MODE (*outloc
))
1446 > GET_MODE_SIZE (GET_MODE (rld
[i
].out_reg
))))
1447 rld
[i
].out_reg
= *outloc
;
1449 if (reg_class_subset_p (rclass
, rld
[i
].rclass
))
1450 rld
[i
].rclass
= rclass
;
1451 rld
[i
].optional
&= optional
;
1452 if (MERGE_TO_OTHER (type
, rld
[i
].when_needed
,
1453 opnum
, rld
[i
].opnum
))
1454 rld
[i
].when_needed
= RELOAD_OTHER
;
1455 rld
[i
].opnum
= MIN (rld
[i
].opnum
, opnum
);
1458 /* If the ostensible rtx being reloaded differs from the rtx found
1459 in the location to substitute, this reload is not safe to combine
1460 because we cannot reliably tell whether it appears in the insn. */
1462 if (in
!= 0 && in
!= *inloc
)
1463 rld
[i
].nocombine
= 1;
1466 /* This was replaced by changes in find_reloads_address_1 and the new
1467 function inc_for_reload, which go with a new meaning of reload_inc. */
1469 /* If this is an IN/OUT reload in an insn that sets the CC,
1470 it must be for an autoincrement. It doesn't work to store
1471 the incremented value after the insn because that would clobber the CC.
1472 So we must do the increment of the value reloaded from,
1473 increment it, store it back, then decrement again. */
1474 if (out
!= 0 && sets_cc0_p (PATTERN (this_insn
)))
1478 rld
[i
].inc
= find_inc_amount (PATTERN (this_insn
), in
);
1479 /* If we did not find a nonzero amount-to-increment-by,
1480 that contradicts the belief that IN is being incremented
1481 in an address in this insn. */
1482 gcc_assert (rld
[i
].inc
!= 0);
1486 /* If we will replace IN and OUT with the reload-reg,
1487 record where they are located so that substitution need
1488 not do a tree walk. */
1490 if (replace_reloads
)
1494 struct replacement
*r
= &replacements
[n_replacements
++];
1496 r
->subreg_loc
= in_subreg_loc
;
1500 if (outloc
!= 0 && outloc
!= inloc
)
1502 struct replacement
*r
= &replacements
[n_replacements
++];
1505 r
->subreg_loc
= out_subreg_loc
;
1510 /* If this reload is just being introduced and it has both
1511 an incoming quantity and an outgoing quantity that are
1512 supposed to be made to match, see if either one of the two
1513 can serve as the place to reload into.
1515 If one of them is acceptable, set rld[i].reg_rtx
1518 if (in
!= 0 && out
!= 0 && in
!= out
&& rld
[i
].reg_rtx
== 0)
1520 rld
[i
].reg_rtx
= find_dummy_reload (in
, out
, inloc
, outloc
,
1523 earlyclobber_operand_p (out
));
1525 /* If the outgoing register already contains the same value
1526 as the incoming one, we can dispense with loading it.
1527 The easiest way to tell the caller that is to give a phony
1528 value for the incoming operand (same as outgoing one). */
1529 if (rld
[i
].reg_rtx
== out
1530 && (REG_P (in
) || CONSTANT_P (in
))
1531 && 0 != find_equiv_reg (in
, this_insn
, NO_REGS
, REGNO (out
),
1532 static_reload_reg_p
, i
, inmode
))
1536 /* If this is an input reload and the operand contains a register that
1537 dies in this insn and is used nowhere else, see if it is the right class
1538 to be used for this reload. Use it if so. (This occurs most commonly
1539 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1540 this if it is also an output reload that mentions the register unless
1541 the output is a SUBREG that clobbers an entire register.
1543 Note that the operand might be one of the spill regs, if it is a
1544 pseudo reg and we are in a block where spilling has not taken place.
1545 But if there is no spilling in this block, that is OK.
1546 An explicitly used hard reg cannot be a spill reg. */
1548 if (rld
[i
].reg_rtx
== 0 && in
!= 0 && hard_regs_live_known
)
1552 enum machine_mode rel_mode
= inmode
;
1554 if (out
&& GET_MODE_SIZE (outmode
) > GET_MODE_SIZE (inmode
))
1557 for (note
= REG_NOTES (this_insn
); note
; note
= XEXP (note
, 1))
1558 if (REG_NOTE_KIND (note
) == REG_DEAD
1559 && REG_P (XEXP (note
, 0))
1560 && (regno
= REGNO (XEXP (note
, 0))) < FIRST_PSEUDO_REGISTER
1561 && reg_mentioned_p (XEXP (note
, 0), in
)
1562 /* Check that a former pseudo is valid; see find_dummy_reload. */
1563 && (ORIGINAL_REGNO (XEXP (note
, 0)) < FIRST_PSEUDO_REGISTER
1564 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR
),
1565 ORIGINAL_REGNO (XEXP (note
, 0)))
1566 && hard_regno_nregs
[regno
][GET_MODE (XEXP (note
, 0))] == 1))
1567 && ! refers_to_regno_for_reload_p (regno
,
1568 end_hard_regno (rel_mode
,
1570 PATTERN (this_insn
), inloc
)
1571 /* If this is also an output reload, IN cannot be used as
1572 the reload register if it is set in this insn unless IN
1574 && (out
== 0 || in
== out
1575 || ! hard_reg_set_here_p (regno
,
1576 end_hard_regno (rel_mode
, regno
),
1577 PATTERN (this_insn
)))
1578 /* ??? Why is this code so different from the previous?
1579 Is there any simple coherent way to describe the two together?
1580 What's going on here. */
1582 || (GET_CODE (in
) == SUBREG
1583 && (((GET_MODE_SIZE (GET_MODE (in
)) + (UNITS_PER_WORD
- 1))
1585 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
)))
1586 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
1587 /* Make sure the operand fits in the reg that dies. */
1588 && (GET_MODE_SIZE (rel_mode
)
1589 <= GET_MODE_SIZE (GET_MODE (XEXP (note
, 0))))
1590 && HARD_REGNO_MODE_OK (regno
, inmode
)
1591 && HARD_REGNO_MODE_OK (regno
, outmode
))
1594 unsigned int nregs
= MAX (hard_regno_nregs
[regno
][inmode
],
1595 hard_regno_nregs
[regno
][outmode
]);
1597 for (offs
= 0; offs
< nregs
; offs
++)
1598 if (fixed_regs
[regno
+ offs
]
1599 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) rclass
],
1604 && (! (refers_to_regno_for_reload_p
1605 (regno
, end_hard_regno (inmode
, regno
), in
, (rtx
*) 0))
1606 || can_reload_into (in
, regno
, inmode
)))
1608 rld
[i
].reg_rtx
= gen_rtx_REG (rel_mode
, regno
);
1615 output_reloadnum
= i
;
/* Record an additional place we must replace a value
   for which we have already recorded a reload.
   RELOADNUM is the value returned by push_reload
   when the reload was recorded.
   This is used in insn patterns that use match_dup.  */

static void
push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
{
  if (replace_reloads)
    {
      struct replacement *r = &replacements[n_replacements++];
      r->what = reloadnum;
      r->where = loc;
      r->subreg_loc = 0;
      r->mode = mode;
    }
}
/* Duplicate any replacement we have recorded to apply at
   location ORIG_LOC to also be performed at DUP_LOC.
   This is used in insn patterns that use match_dup.  */

static void
dup_replacements (rtx *dup_loc, rtx *orig_loc)
{
  int i, n = n_replacements;

  for (i = 0; i < n; i++)
    {
      struct replacement *r = &replacements[i];
      if (r->where == orig_loc)
	push_replacement (dup_loc, r->what, r->mode);
    }
}
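
/* Illustrative usage sketch (not taken from a particular caller): when an
   insn pattern refers to an operand again through a match_dup, the caller
   that pushed the reload for the original operand can register the extra
   location too, roughly

       push_replacement (recog_data.operand_loc[1], rnum, SImode);
       dup_replacements (recog_data.dup_loc[0], recog_data.operand_loc[1]);

   where RNUM stands for whatever value push_reload returned for operand 1
   and SImode is only a placeholder mode.  subst_reloads then patches both
   locations with the chosen reload register.  */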
/* Transfer all replacements that used to be in reload FROM to be in
   reload TO.  */

void
transfer_replacements (int to, int from)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    if (replacements[i].what == from)
      replacements[i].what = to;
}
/* IN_RTX is the value loaded by a reload that we now decided to inherit,
   or a subpart of it.  If we have any replacements registered for IN_RTX,
   cancel the reloads that were supposed to load them.
   Return nonzero if we canceled any reloads.  */

int
remove_address_replacements (rtx in_rtx)
{
  int i, j;
  char reload_flags[MAX_RELOADS];
  int something_changed = 0;

  memset (reload_flags, 0, sizeof reload_flags);
  for (i = 0, j = 0; i < n_replacements; i++)
    {
      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
	reload_flags[replacements[i].what] |= 1;
      else
	{
	  replacements[j++] = replacements[i];
	  reload_flags[replacements[i].what] |= 2;
	}
    }
  /* Note that the following store must be done before the recursive calls.  */
  n_replacements = j;

  for (i = n_reloads - 1; i >= 0; i--)
    {
      if (reload_flags[i] == 1)
	{
	  deallocate_reload_reg (i);
	  remove_address_replacements (rld[i].in);
	  rld[i].in = 0;
	  something_changed = 1;
	}
    }
  return something_changed;
}
/* If there is only one output reload, and it is not for an earlyclobber
   operand, try to combine it with a (logically unrelated) input reload
   to reduce the number of reload registers needed.

   This is safe if the input reload does not appear in
   the value being output-reloaded, because this implies
   it is not needed any more once the original insn completes.

   If that doesn't work, see we can use any of the registers that
   die in this insn as a reload register.  We can if it is of the right
   class and does not appear in the value being output-reloaded.  */

static void
combine_reloads (void)
{
  int i, regno;
  int output_reload = -1;
  int secondary_out = -1;
  rtx note;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
	if (output_reload >= 0)
	  return;

	output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	&& rld[i].opnum == rld[output_reload].opnum)
      return;

  /* Check each input reload; can we combine it?  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
	/* Life span of this reload must not extend past main insn.  */
	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
	&& rld[i].when_needed != RELOAD_OTHER
	&& (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
	    == CLASS_MAX_NREGS (rld[output_reload].rclass,
				rld[output_reload].outmode))
	&& rld[i].inc == 0
	&& rld[i].reg_rtx == 0
#ifdef SECONDARY_MEMORY_NEEDED
	/* Don't combine two reloads with different secondary
	   memory locations.  */
	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
#endif
	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
	    ? (rld[i].rclass == rld[output_reload].rclass)
	    : (reg_class_subset_p (rld[i].rclass,
				   rld[output_reload].rclass)
	       || reg_class_subset_p (rld[output_reload].rclass,
				      rld[i].rclass)))
	&& (MATCHES (rld[i].in, rld[output_reload].out)
	    /* Args reversed because the first arg seems to be
	       the one that we imagine being modified
	       while the second is the one that might be affected.  */
	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
						      rld[i].in)
		/* However, if the input is a register that appears inside
		   the output, then we also can't share.
		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
		   If the same reload reg is used for both reg 69 and the
		   result to be stored in memory, then that result
		   will clobber the address of the memory ref.  */
		&& ! (REG_P (rld[i].in)
		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
							     rld[output_reload].out))))
	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
					 rld[i].when_needed != RELOAD_FOR_INPUT)
	&& (reg_class_size[(int) rld[i].rclass]
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	/* We will allow making things slightly worse by combining an
	   input and an output, but no worse than that.  */
	&& (rld[i].when_needed == RELOAD_FOR_INPUT
	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
      {
	int j;

	/* We have found a reload to combine with!  */
	rld[i].out = rld[output_reload].out;
	rld[i].out_reg = rld[output_reload].out_reg;
	rld[i].outmode = rld[output_reload].outmode;
	/* Mark the old output reload as inoperative.  */
	rld[output_reload].out = 0;
	/* The combined reload is needed for the entire insn.  */
	rld[i].when_needed = RELOAD_OTHER;
	/* If the output reload had a secondary reload, copy it.  */
	if (rld[output_reload].secondary_out_reload != -1)
	  {
	    rld[i].secondary_out_reload
	      = rld[output_reload].secondary_out_reload;
	    rld[i].secondary_out_icode
	      = rld[output_reload].secondary_out_icode;
	  }

#ifdef SECONDARY_MEMORY_NEEDED
	/* Copy any secondary MEM.  */
	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
#endif
	/* If required, minimize the register class.  */
	if (reg_class_subset_p (rld[output_reload].rclass,
				rld[i].rclass))
	  rld[i].rclass = rld[output_reload].rclass;

	/* Transfer all replacements from the old reload to the combined.  */
	for (j = 0; j < n_replacements; j++)
	  if (replacements[j].what == output_reload)
	    replacements[j].what = i;

	return;
      }

  /* If this insn has only one operand that is modified or written (assumed
     to be the first),  it must be the one corresponding to this reload.  It
     is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber.  If this is an asm insn, give up.  */

  if (INSN_CODE (this_insn) == -1)
    return;

  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
      return;

  /* See if some hard register that dies in this insn and is not used in
     the output is the right class.  Only works if the register we pick
     up can fully hold our output reload.  */
  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& REG_P (XEXP (note, 0))
	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
						rld[output_reload].out)
	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
			      regno)
	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
	/* Ensure that a secondary or tertiary reload for this output
	   won't want this register.  */
	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
	    || (!(TEST_HARD_REG_BIT
		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
		    || !(TEST_HARD_REG_BIT
			 (reg_class_contents[(int) rld[secondary_out].rclass],
			  regno)))))
	&& !fixed_regs[regno]
	/* Check that a former pseudo is valid; see find_dummy_reload.  */
	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
			       ORIGINAL_REGNO (XEXP (note, 0)))
		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
      {
	rld[output_reload].reg_rtx
	  = gen_rtx_REG (rld[output_reload].outmode, regno);
	return;
      }
}
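
/* A hypothetical example of the combination above (the pseudo numbers are
   invented for illustration): given an insn such as

       (set (reg:SI 260) (plus:SI (reg:SI 261) (const_int 4)))

   where pseudo 261 needs an input reload and pseudo 260 needs an output
   reload of the same class and width, the input reload simply takes over
   the out, out_reg and outmode fields of the output reload and becomes
   RELOAD_OTHER, so one spill register carries the operand in and the
   result back out instead of two.  */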
/* Try to find a reload register for an in-out reload (expressions IN and OUT).
   See if one of IN and OUT is a register that may be used;
   this is desirable since a spill-register won't be needed.
   If so, return the register rtx that proves acceptable.

   INLOC and OUTLOC are locations where IN and OUT appear in the insn.
   RCLASS is the register class required for the reload.

   If FOR_REAL is >= 0, it is the number of the reload,
   and in some cases when it can be discovered that OUT doesn't need
   to be computed, clear out rld[FOR_REAL].out.

   If FOR_REAL is -1, this should not be done, because this call
   is just to see if a register can be found, not to find and install it.

   EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
   puts an additional constraint on being able to use IN for OUT since
   IN must not appear elsewhere in the insn (it is assumed that IN itself
   is safe from the earlyclobber).  */

static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
		   enum machine_mode inmode, enum machine_mode outmode,
		   reg_class_t rclass, int for_real, int earlyclobber)
{
  rtx in = real_in, out = real_out;
  int in_offset = 0;
  int out_offset = 0;
  rtx value = 0;

  /* If operands exceed a word, we can't use either of them
     unless they have the same size.  */
  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
    return 0;

  /* Note that {in,out}_offset are needed only when 'in' or 'out'
     respectively refers to a hard register.  */

  /* Find the inside of any subregs.  */
  while (GET_CODE (out) == SUBREG)
    {
      if (REG_P (SUBREG_REG (out))
	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   SUBREG_BYTE (out),
					   GET_MODE (out));
      out = SUBREG_REG (out);
    }
  while (GET_CODE (in) == SUBREG)
    {
      if (REG_P (SUBREG_REG (in))
	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
					  GET_MODE (SUBREG_REG (in)),
					  SUBREG_BYTE (in),
					  GET_MODE (in));
      in = SUBREG_REG (in);
    }

  /* Narrow down the reg class, the same way push_reload will;
     otherwise we might find a dummy now, but push_reload won't.  */
  {
    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
    if (preferred_class != NO_REGS)
      rclass = (enum reg_class) preferred_class;
  }

  /* See if OUT will do.  */
  if (REG_P (out)
      && REGNO (out) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (out) + out_offset;
      unsigned int nwords = hard_regno_nregs[regno][outmode];
      rtx saved_rtx;

      /* When we consider whether the insn uses OUT,
	 ignore references within IN.  They don't prevent us
	 from copying IN into OUT, because those refs would
	 move into the insn that reloads IN.

	 However, we only ignore IN in its role as this reload.
	 If the insn uses IN elsewhere and it contains OUT,
	 that counts.  We can't be sure it's the "same" operand
	 so it might not go through this reload.  */
      saved_rtx = *inloc;
      *inloc = const0_rtx;

      if (regno < FIRST_PSEUDO_REGISTER
	  && HARD_REGNO_MODE_OK (regno, outmode)
	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
					     PATTERN (this_insn), outloc))
	{
	  unsigned int i;

	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i))
	      break;

	  if (i == nwords)
	    {
	      if (REG_P (real_out))
		value = real_out;
	      else
		value = gen_rtx_REG (outmode, regno);
	    }
	}

      *inloc = saved_rtx;
    }

  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it is dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
  if (hard_regs_live_known
      && REG_P (in)
      && REGNO (in) < FIRST_PSEUDO_REGISTER
      && (value == 0
	  || find_reg_note (this_insn, REG_UNUSED, real_out))
      && find_reg_note (this_insn, REG_DEAD, real_in)
      && !fixed_regs[REGNO (in)]
      && HARD_REGNO_MODE_OK (REGNO (in),
			     /* The only case where out and real_out might
				have different modes is where real_out
				is a subreg, and in that case, out
				has a real mode.  */
			     (GET_MODE (out) != VOIDmode
			      ? GET_MODE (out) : outmode))
      && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
	  /* However only do this if we can be sure that this input
	     operand doesn't correspond with an uninitialized pseudo.
	     global can assign some hardreg to it that is the same as
	     the one assigned to a different, also live pseudo (as it
	     can ignore the conflict).  We must never introduce writes
	     to such hardregs, as they would clobber the other live
	     pseudo.  See PR 20973.  */
	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
			     ORIGINAL_REGNO (in))
	      /* Similarly, only do this if we can be sure that the death
		 note is still valid.  global can assign some hardreg to
		 the pseudo referenced in the note and simultaneously a
		 subword of this hardreg to a different, also live pseudo,
		 because only another subword of the hardreg is actually
		 used in the insn.  This cannot happen if the pseudo has
		 been assigned exactly one hardreg.  See PR 33732.  */
	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
    {
      unsigned int regno = REGNO (in) + in_offset;
      unsigned int nwords = hard_regno_nregs[regno][inmode];

      if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx *) 0)
	  && ! hard_reg_set_here_p (regno, regno + nwords,
				    PATTERN (this_insn))
	  && (! earlyclobber
	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
						 PATTERN (this_insn), inloc)))
	{
	  unsigned int i;

	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i))
	      break;

	  if (i == nwords)
	    {
	      /* If we were going to use OUT as the reload reg
		 and changed our mind, it means OUT is a dummy that
		 dies here.  So don't bother copying value to it.  */
	      if (for_real >= 0 && value == real_out)
		rld[for_real].out = 0;
	      if (REG_P (real_in))
		value = real_in;
	      else
		value = gen_rtx_REG (inmode, regno);
	    }
	}
    }

  return value;
}
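
/* Illustrative cases (register numbers invented): if OUT is (reg:SI 2)
   and the rest of the insn never mentions register 2, the function can
   return (reg:SI 2) itself, so no separate spill register is needed.
   If IN is chosen instead -- a hard register that dies here, is not
   fixed, and does not appear in OUT -- and OUT had looked usable too,
   then OUT is just a dummy that dies in this insn, and for FOR_REAL >= 0
   the caller's rld[FOR_REAL].out is cleared so no store is emitted.  */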
/* This page contains subroutines used mainly for determining
   whether the IN or an OUT of a reload can serve as the
   reload register.  */

/* Return 1 if X is an operand of an insn that is being earlyclobbered.  */

int
earlyclobber_operand_p (rtx x)
{
  int i;

  for (i = 0; i < n_earlyclobbers; i++)
    if (reload_earlyclobbers[i] == x)
      return 1;

  return 0;
}
/* Return 1 if expression X alters a hard reg in the range
   from BEG_REGNO (inclusive) to END_REGNO (exclusive),
   either explicitly or in the guise of a pseudo-reg allocated to REGNO.
   X should be the body of an instruction.  */

static int
hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
{
  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx op0 = SET_DEST (x);

      while (GET_CODE (op0) == SUBREG)
	op0 = SUBREG_REG (op0);

      if (REG_P (op0))
	{
	  unsigned int r = REGNO (op0);

	  /* See if this reg overlaps range under consideration.  */
	  if (r < end_regno
	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
	    return 1;
	}
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i = XVECLEN (x, 0) - 1;

      for (; i >= 0; i--)
	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
	  return 1;
    }

  return 0;
}
/* Return 1 if ADDR is a valid memory address for mode MODE
   in address space AS, and check that each pseudo reg has the
   proper kind of hard reg.  */

int
strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				    rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
#endif
}
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   autoincrement and autodecrement.
   This is specifically intended for find_reloads to use
   in determining whether two operands match.
   X is the operand whose number is the lower of the two.

   The value is 2 if Y contains a pre-increment that matches
   a non-incrementing address in X.  */

/* ??? To be completely correct, we should arrange to pass
   for X the output operand and for Y the input operand.
   For now, we assume that the output operand has the lower number
   because that is natural in (SET output (... input ...)).  */

int
operands_match_p (rtx x, rtx y)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int success_2 = 0;

  if (x == y)
    return 1;
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int j;

      if (code == SUBREG)
	{
	  i = REGNO (SUBREG_REG (x));
	  if (i >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x),
				    GET_MODE (x));
	}
      else
	i = REGNO (x);

      if (GET_CODE (y) == SUBREG)
	{
	  j = REGNO (SUBREG_REG (y));
	  if (j >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
				    GET_MODE (SUBREG_REG (y)),
				    SUBREG_BYTE (y),
				    GET_MODE (y));
	}
      else
	j = REGNO (y);

      /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
	 multiple hard register group of scalar integer registers, so that
	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
	 register.  */
      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && i < FIRST_PSEUDO_REGISTER)
	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (y))
	  && j < FIRST_PSEUDO_REGISTER)
	j += hard_regno_nregs[j][GET_MODE (y)] - 1;

      return i == j;
    }
  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two postincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a postincrement matches ordinary indexing
     if the postincrement is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y);
  /* Two preincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a preincrement matches ordinary indexing
     if the preincrement is the input operand.
     In this case, return 2, since some callers need to do special
     things when this happens.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;

 slow:

  /* Now we have disposed of all the cases in which different rtx codes
     can match.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs refering to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
	  if (val == 0)
	    return 0;
	  /* If any subexpression returns 2,
	     we should return 2 if we are successful.  */
	  if (val == 2)
	    success_2 = 1;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
	      if (val == 0)
		return 0;
	      if (val == 2)
		success_2 = 1;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1 + success_2;
}
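
/* A small worked example of the return value 2 (operands invented): with
   X = (mem:SI (reg:SI 5)) and Y = (mem:SI (pre_inc:SI (reg:SI 5))), the
   addresses are considered to match, but the increment is an extra side
   effect the caller still has to deal with, so the function returns 2
   rather than 1 to let callers tell the two situations apart.  */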
/* Describe the range of registers or memory referenced by X.
   If X is a register, set REG_FLAG and put the first register
   number into START and the last plus one into END.
   If X is a memory reference, put a base address into BASE
   and a range of integer offsets into START and END.
   If X is pushing on the stack, we can assume it causes no trouble,
   so we set the SAFE field.  */

static struct decomposition
decompose (rtx x)
{
  struct decomposition val;
  int all_const = 0;

  memset (&val, 0, sizeof (val));

  switch (GET_CODE (x))
    {
    case MEM:
      {
	rtx base = NULL_RTX, offset = 0;
	rtx addr = XEXP (x, 0);

	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
	  {
	    val.base = XEXP (addr, 0);
	    val.start = -GET_MODE_SIZE (GET_MODE (x));
	    val.end = GET_MODE_SIZE (GET_MODE (x));
	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
	    return val;
	  }

	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
	  {
	    if (GET_CODE (XEXP (addr, 1)) == PLUS
		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
	      {
		val.base = XEXP (addr, 0);
		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
		val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
		val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
		return val;
	      }
	  }

	if (GET_CODE (addr) == CONST)
	  {
	    addr = XEXP (addr, 0);
	    all_const = 1;
	  }
	if (GET_CODE (addr) == PLUS)
	  {
	    if (CONSTANT_P (XEXP (addr, 0)))
	      {
		base = XEXP (addr, 1);
		offset = XEXP (addr, 0);
	      }
	    else if (CONSTANT_P (XEXP (addr, 1)))
	      {
		base = XEXP (addr, 0);
		offset = XEXP (addr, 1);
	      }
	  }

	if (offset == 0)
	  {
	    base = addr;
	    offset = const0_rtx;
	  }
	if (GET_CODE (offset) == CONST)
	  offset = XEXP (offset, 0);
	if (GET_CODE (offset) == PLUS)
	  {
	    if (CONST_INT_P (XEXP (offset, 0)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
		offset = XEXP (offset, 0);
	      }
	    else if (CONST_INT_P (XEXP (offset, 1)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
		offset = XEXP (offset, 1);
	      }
	    else
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
		offset = const0_rtx;
	      }
	  }
	else if (!CONST_INT_P (offset))
	  {
	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
	    offset = const0_rtx;
	  }

	if (all_const && GET_CODE (base) == PLUS)
	  base = gen_rtx_CONST (GET_MODE (base), base);

	gcc_assert (CONST_INT_P (offset));

	val.start = INTVAL (offset);
	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
	val.base = base;
      }
      break;

    case REG:
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo with no hard reg.  */
	  val.start = REGNO (x);
	  val.end = val.start + 1;
	}
      else
	/* A hard reg.  */
	val.end = end_hard_regno (GET_MODE (x), val.start);
      break;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	/* This could be more precise, but it's good enough.  */
	return decompose (SUBREG_REG (x));
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	return decompose (SUBREG_REG (x));
      else
	/* A hard reg.  */
	val.end = val.start + subreg_nregs (x);
      break;

    case SCRATCH:
      /* This hasn't been assigned yet, so it can't conflict yet.  */
      val.safe = 1;
      break;

    default:
      gcc_assert (CONSTANT_P (x));
      val.safe = 1;
      break;
    }

  return val;
}
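
/* A worked example (assuming a target where SImode is 4 bytes wide): for
   X = (mem:SI (plus:SI (reg:SI fp) (const_int 8))), decompose gives
   base = (reg:SI fp), start = 8, end = 12, reg_flag = 0 and safe = 0.
   Two such memory references with equal bases are then known not to
   overlap exactly when their [start, end) ranges are disjoint, which is
   the test immune_p applies below.  */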
/* Return 1 if altering Y will not modify the value of X.
   Y is also described by YDATA, which should be decompose (Y).  */

static int
immune_p (rtx x, rtx y, struct decomposition ydata)
{
  struct decomposition xdata;

  if (ydata.reg_flag)
    return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx *) 0);
  if (ydata.safe)
    return 1;

  gcc_assert (MEM_P (y));
  /* If Y is memory and X is not, Y can't affect X.  */
  if (!MEM_P (x))
    return 1;

  xdata = decompose (x);

  if (! rtx_equal_p (xdata.base, ydata.base))
    {
      /* If bases are distinct symbolic constants, there is no overlap.  */
      if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
	return 1;
      /* Constants and stack slots never overlap.  */
      if (CONSTANT_P (xdata.base)
	  && (ydata.base == frame_pointer_rtx
	      || ydata.base == hard_frame_pointer_rtx
	      || ydata.base == stack_pointer_rtx))
	return 1;
      if (CONSTANT_P (ydata.base)
	  && (xdata.base == frame_pointer_rtx
	      || xdata.base == hard_frame_pointer_rtx
	      || xdata.base == stack_pointer_rtx))
	return 1;
      /* If either base is variable, we don't know anything.  */
      return 0;
    }

  return (xdata.start >= ydata.end || ydata.start >= xdata.end);
}
/* Similar, but calls decompose.  */

int
safe_from_earlyclobber (rtx op, rtx clobber)
{
  struct decomposition early_data;

  early_data = decompose (clobber);
  return immune_p (op, clobber, early_data);
}
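
/* Usage sketch (hypothetical caller): before letting an input share a
   register with an earlyclobbered output, a caller can test

       if (! safe_from_earlyclobber (op, clobber))
         ...reload OP into its own register...

   find_reloads below performs the equivalent check through decompose and
   immune_p when an alternative marks an output operand with '&'.  */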
/* Main entry point of this file: search the body of INSN
   for values that need reloading and record them with push_reload.
   REPLACE nonzero means record also where the values occur
   so that subst_reloads can be used.

   IND_LEVELS says how many levels of indirection are supported by this
   machine; a value of zero means that a memory reference is not a valid
   memory address.

   LIVE_KNOWN says we have valid information about which hard
   regs are live at each point in the program; this is true when
   we are called from global_alloc but false when stupid register
   allocation has been done.

   RELOAD_REG_P if nonzero is a vector indexed by hard reg number
   which is nonnegative if the reg has been commandeered for reloading into.
   It is copied into STATIC_RELOAD_REG_P and referenced from there
   by various subroutines.

   Return TRUE if some operands need to be changed, because of swapping
   commutative operands, reg_equiv_address substitution, or whatever.  */

int
find_reloads (rtx insn, int replace, int ind_levels, int live_known,
	      short *reload_reg_p)
{
  int insn_code_number;
  int i, j;
  int noperands;
  /* These start out as the constraints for the insn
     and they are chewed up as we consider alternatives.  */
  const char *constraints[MAX_RECOG_OPERANDS];
  /* These are the preferred classes for an operand, or NO_REGS if it isn't
     a register.  */
  enum reg_class preferred_class[MAX_RECOG_OPERANDS];
  char pref_or_nothing[MAX_RECOG_OPERANDS];
  /* Nonzero for a MEM operand whose entire address needs a reload.
     May be -1 to indicate the entire address may or may not need a reload.  */
  int address_reloaded[MAX_RECOG_OPERANDS];
  /* Nonzero for an address operand that needs to be completely reloaded.
     May be -1 to indicate the entire operand may or may not need a reload.  */
  int address_operand_reloaded[MAX_RECOG_OPERANDS];
  /* Value of enum reload_type to use for operand.  */
  enum reload_type operand_type[MAX_RECOG_OPERANDS];
  /* Value of enum reload_type to use within address of operand.  */
  enum reload_type address_type[MAX_RECOG_OPERANDS];
  /* Save the usage of each operand.  */
  enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
  int no_input_reloads = 0, no_output_reloads = 0;
  int n_alternatives;
  reg_class_t this_alternative[MAX_RECOG_OPERANDS];
  char this_alternative_match_win[MAX_RECOG_OPERANDS];
  char this_alternative_win[MAX_RECOG_OPERANDS];
  char this_alternative_offmemok[MAX_RECOG_OPERANDS];
  char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
  int this_alternative_matches[MAX_RECOG_OPERANDS];
  int swapped;
  reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
  int this_alternative_number;
  int goal_alternative_number = 0;
  int operand_reloadnum[MAX_RECOG_OPERANDS];
  int goal_alternative_matches[MAX_RECOG_OPERANDS];
  int goal_alternative_matched[MAX_RECOG_OPERANDS];
  char goal_alternative_match_win[MAX_RECOG_OPERANDS];
  char goal_alternative_win[MAX_RECOG_OPERANDS];
  char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
  char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
  int goal_alternative_swapped;
  int best;
  int commutative;
  char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx body = PATTERN (insn);
  rtx set = single_set (insn);
  int goal_earlyclobber = 0, this_earlyclobber;
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  int retval = 0;

  this_insn = insn;
  n_reloads = 0;
  n_replacements = 0;
  n_earlyclobbers = 0;
  replace_reloads = replace;
  hard_regs_live_known = live_known;
  static_reload_reg_p = reload_reg_p;

  /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
     neither are insns that SET cc0.  Insns that use CC0 are not allowed
     to have any input reloads.  */
  if (JUMP_P (insn) || CALL_P (insn))
    no_output_reloads = 1;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    no_input_reloads = 1;
  if (reg_set_p (cc0_rtx, PATTERN (insn)))
    no_output_reloads = 1;
#endif

#ifdef SECONDARY_MEMORY_NEEDED
  /* The eliminated forms of any secondary memory locations are per-insn, so
     clear them out here.  */

  if (secondary_memlocs_elim_used)
    {
      memset (secondary_memlocs_elim, 0,
	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
      secondary_memlocs_elim_used = 0;
    }
#endif
  /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
     is cheap to move between them.  If it is not, there may not be an insn
     to do the copy, so we may need a reload.  */
  if (GET_CODE (body) == SET
      && REG_P (SET_DEST (body))
      && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
      && REG_P (SET_SRC (body))
      && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
      && register_move_cost (GET_MODE (SET_SRC (body)),
			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
    return 0;

  extract_insn (insn);

  noperands = reload_n_operands = recog_data.n_operands;
  n_alternatives = recog_data.n_alternatives;

  /* Just return "no reloads" if insn has no operands with constraints.  */
  if (noperands == 0 || n_alternatives == 0)
    return 0;

  insn_code_number = INSN_CODE (insn);
  this_insn_is_asm = insn_code_number < 0;

  memcpy (operand_mode, recog_data.operand_mode,
	  noperands * sizeof (enum machine_mode));
  memcpy (constraints, recog_data.constraints,
	  noperands * sizeof (const char *));

  commutative = -1;
  /* If we will need to know, later, whether some pair of operands
     are the same, we must compare them now and save the result.
     Reloading the base and index registers will clobber them
     and afterward they will fail to match.  */

  for (i = 0; i < noperands; i++)
    {
      const char *p;
      int c;
      char *end;

      substed_operand[i] = recog_data.operand[i];
      p = constraints[i];

      modified[i] = RELOAD_READ;

      /* Scan this operand's constraint to see if it is an output operand,
	 an in-out operand, is commutative, or should match another.  */

      while ((c = *p))
	{
	  p += CONSTRAINT_LEN (c, p);
	  switch (c)
	    {
	    case '=':
	      modified[i] = RELOAD_WRITE;
	      break;
	    case '+':
	      modified[i] = RELOAD_READ_WRITE;
	      break;
	    case '%':
	      /* The last operand should not be marked commutative.  */
	      gcc_assert (i != noperands - 1);

	      /* We currently only support one commutative pair of
		 operands.  Some existing asm code currently uses more
		 than one pair.  Previously, that would usually work,
		 but sometimes it would crash the compiler.  We
		 continue supporting that case as well as we can by
		 silently ignoring all but the first pair.  In the
		 future we may handle it correctly.  */
	      if (commutative < 0)
		commutative = i;
	      else
		gcc_assert (this_insn_is_asm);
	      break;
	    /* Use of ISDIGIT is tempting here, but it may get expensive because
	       of locale support we don't want.  */
	    case '0': case '1': case '2': case '3': case '4':
	    case '5': case '6': case '7': case '8': case '9':
	      {
		c = strtoul (p - 1, &end, 10);
		p = end;

		operands_match[c][i]
		  = operands_match_p (recog_data.operand[c],
				      recog_data.operand[i]);

		/* An operand may not match itself.  */
		gcc_assert (c != i);

		/* If C can be commuted with C+1, and C might need to match I,
		   then C+1 might also need to match I.  */
		if (commutative >= 0)
		  {
		    if (c == commutative || c == commutative + 1)
		      {
			int other = c + (c == commutative ? 1 : -1);
			operands_match[other][i]
			  = operands_match_p (recog_data.operand[other],
					      recog_data.operand[i]);
		      }
		    if (i == commutative || i == commutative + 1)
		      {
			int other = i + (i == commutative ? 1 : -1);
			operands_match[c][other]
			  = operands_match_p (recog_data.operand[c],
					      recog_data.operand[other]);
		      }
		    /* Note that C is supposed to be less than I.
		       No need to consider altering both C and I because in
		       that case we would alter one into the other.  */
		  }
	      }
	      break;

	    default:
	      break;
	    }
	}
    }

  /* Examine each operand that is a memory reference or memory address
     and reload parts of the addresses into index registers.
     Also here any references to pseudo regs that didn't get hard regs
     but are equivalent to constants get replaced in the insn itself
     with those constants.  Nobody will ever see them again.

     Finally, set up the preferred classes of each operand.  */

  for (i = 0; i < noperands; i++)
    {
      RTX_CODE code = GET_CODE (recog_data.operand[i]);

      address_reloaded[i] = 0;
      address_operand_reloaded[i] = 0;
      operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
			 : RELOAD_OTHER);
      address_type[i]
	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
	   : RELOAD_OTHER);

      if (*constraints[i] == 0)
	/* Ignore things like match_operator operands.  */
	;
      else if (constraints[i][0] == 'p'
	       || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
	{
	  address_operand_reloaded[i]
	    = find_reloads_address (recog_data.operand_mode[i], (rtx *) 0,
				    recog_data.operand[i],
				    recog_data.operand_loc[i],
				    i, operand_type[i], ind_levels, insn);

	  /* If we now have a simple operand where we used to have a
	     PLUS or MULT, re-recognize and try again.  */
	  if ((OBJECT_P (*recog_data.operand_loc[i])
	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
	      && (GET_CODE (recog_data.operand[i]) == MULT
		  || GET_CODE (recog_data.operand[i]) == PLUS))
	    {
	      INSN_CODE (insn) = -1;
	      retval = find_reloads (insn, replace, ind_levels, live_known,
				     reload_reg_p);
	      return retval;
	    }

	  recog_data.operand[i] = *recog_data.operand_loc[i];
	  substed_operand[i] = recog_data.operand[i];

	  /* Address operands are reloaded in their existing mode,
	     no matter what is specified in the machine description.  */
	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
      else if (code == MEM)
	{
	  address_reloaded[i]
	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
				    recog_data.operand_loc[i],
				    XEXP (recog_data.operand[i], 0),
				    &XEXP (recog_data.operand[i], 0),
				    i, address_type[i], ind_levels, insn);
	  recog_data.operand[i] = *recog_data.operand_loc[i];
	  substed_operand[i] = recog_data.operand[i];
	}
      else if (code == SUBREG)
	{
	  rtx reg = SUBREG_REG (recog_data.operand[i]);
	  rtx op
	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
				   ind_levels,
				   set != 0
				   && &SET_DEST (set) == recog_data.operand_loc[i],
				   insn,
				   &address_reloaded[i]);

	  /* If we made a MEM to load (a part of) the stackslot of a pseudo
	     that didn't get a hard register, emit a USE with a REG_EQUAL
	     note in front so that we might inherit a previous, possibly
	     wider reload.  */

	  if (replace
	      && MEM_P (op)
	      && REG_P (reg)
	      && (GET_MODE_SIZE (GET_MODE (reg))
		  >= GET_MODE_SIZE (GET_MODE (op)))
	      && reg_equiv_constant[REGNO (reg)] == 0)
	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
						   insn),
				 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);

	  substed_operand[i] = recog_data.operand[i] = op;
	}
      else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
	/* We can get a PLUS as an "operand" as a result of register
	   elimination.  See eliminate_regs and gen_reload.  We handle
	   a unary operator by reloading the operand.  */
	substed_operand[i] = recog_data.operand[i]
	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
				 ind_levels, 0, insn,
				 &address_reloaded[i]);
      else if (code == REG)
	{
	  /* This is equivalent to calling find_reloads_toplev.
	     The code is duplicated for speed.
	     When we find a pseudo always equivalent to a constant,
	     we replace it by the constant.  We must be sure, however,
	     that we don't try to replace it in the insn in which it
	     is being set.  */
	  int regno = REGNO (recog_data.operand[i]);
	  if (reg_equiv_constant[regno] != 0
	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
	    {
	      /* Record the existing mode so that the check if constants are
		 allowed will work when operand_mode isn't specified.  */

	      if (operand_mode[i] == VOIDmode)
		operand_mode[i] = GET_MODE (recog_data.operand[i]);

	      substed_operand[i] = recog_data.operand[i]
		= reg_equiv_constant[regno];
	    }
	  if (reg_equiv_memory_loc[regno] != 0
	      && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
	    /* We need not give a valid is_set_dest argument since the case
	       of a constant equivalence was checked above.  */
	    substed_operand[i] = recog_data.operand[i]
	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
				     ind_levels, 0, insn,
				     &address_reloaded[i]);
	}
      /* If the operand is still a register (we didn't replace it with an
	 equivalent), get the preferred class to reload it into.  */
      code = GET_CODE (recog_data.operand[i]);
      preferred_class[i]
	= ((code == REG && REGNO (recog_data.operand[i])
	    >= FIRST_PSEUDO_REGISTER)
	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
	   : NO_REGS);
      pref_or_nothing[i]
	= (code == REG
	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
    }

  /* If this is simply a copy from operand 1 to operand 0, merge the
     preferred classes for the operands.  */
  if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
      && recog_data.operand[1] == SET_SRC (set))
    {
      preferred_class[0] = preferred_class[1]
	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2925 pref_or_nothing
[0] |= pref_or_nothing
[1];
2926 pref_or_nothing
[1] |= pref_or_nothing
[0];
2929 /* Now see what we need for pseudo-regs that didn't get hard regs
2930 or got the wrong kind of hard reg. For this, we must consider
2931 all the operands together against the register constraints. */
2933 best
= MAX_RECOG_OPERANDS
* 2 + 600;
2936 goal_alternative_swapped
= 0;
2939 /* The constraints are made of several alternatives.
2940 Each operand's constraint looks like foo,bar,... with commas
2941 separating the alternatives. The first alternatives for all
2942 operands go together, the second alternatives go together, etc.
2944 First loop over alternatives. */
2946 for (this_alternative_number
= 0;
2947 this_alternative_number
< n_alternatives
;
2948 this_alternative_number
++)
2950 /* Loop over operands for one constraint alternative. */
2951 /* LOSERS counts those that don't fit this alternative
2952 and would require loading. */
2954 /* BAD is set to 1 if it some operand can't fit this alternative
2955 even after reloading. */
2957 /* REJECT is a count of how undesirable this alternative says it is
2958 if any reloading is required. If the alternative matches exactly
2959 then REJECT is ignored, but otherwise it gets this much
2960 counted against it in addition to the reloading needed. Each
2961 ? counts three times here since we want the disparaging caused by
2962 a bad register class to only count 1/3 as much. */
2965 if (!recog_data
.alternative_enabled_p
[this_alternative_number
])
2969 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2970 constraints
[i
] = skip_alternative (constraints
[i
]);
2975 this_earlyclobber
= 0;
2977 for (i
= 0; i
< noperands
; i
++)
2979 const char *p
= constraints
[i
];
2984 /* 0 => this operand can be reloaded somehow for this alternative. */
2986 /* 0 => this operand can be reloaded if the alternative allows regs. */
2990 rtx operand
= recog_data
.operand
[i
];
2992 /* Nonzero means this is a MEM that must be reloaded into a reg
2993 regardless of what the constraint says. */
2994 int force_reload
= 0;
2996 /* Nonzero if a constant forced into memory would be OK for this
2999 int earlyclobber
= 0;
3001 /* If the predicate accepts a unary operator, it means that
3002 we need to reload the operand, but do not do this for
3003 match_operator and friends. */
3004 if (UNARY_P (operand
) && *p
!= 0)
3005 operand
= XEXP (operand
, 0);
3007 /* If the operand is a SUBREG, extract
3008 the REG or MEM (or maybe even a constant) within.
3009 (Constants can occur as a result of reg_equiv_constant.) */
3011 while (GET_CODE (operand
) == SUBREG
)
3013 /* Offset only matters when operand is a REG and
3014 it is a hard reg. This is because it is passed
3015 to reg_fits_class_p if it is a REG and all pseudos
3016 return 0 from that function. */
3017 if (REG_P (SUBREG_REG (operand
))
3018 && REGNO (SUBREG_REG (operand
)) < FIRST_PSEUDO_REGISTER
)
3020 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand
)),
3021 GET_MODE (SUBREG_REG (operand
)),
3022 SUBREG_BYTE (operand
),
3023 GET_MODE (operand
)) < 0)
3025 offset
+= subreg_regno_offset (REGNO (SUBREG_REG (operand
)),
3026 GET_MODE (SUBREG_REG (operand
)),
3027 SUBREG_BYTE (operand
),
3028 GET_MODE (operand
));
3030 operand
= SUBREG_REG (operand
);
3031 /* Force reload if this is a constant or PLUS or if there may
3032 be a problem accessing OPERAND in the outer mode. */
3033 if (CONSTANT_P (operand
)
3034 || GET_CODE (operand
) == PLUS
3035 /* We must force a reload of paradoxical SUBREGs
3036 of a MEM because the alignment of the inner value
3037 may not be enough to do the outer reference. On
3038 big-endian machines, it may also reference outside
3041 On machines that extend byte operations and we have a
3042 SUBREG where both the inner and outer modes are no wider
3043 than a word and the inner mode is narrower, is integral,
3044 and gets extended when loaded from memory, combine.c has
3045 made assumptions about the behavior of the machine in such
3046 register access. If the data is, in fact, in memory we
3047 must always load using the size assumed to be in the
3048 register and let the insn do the different-sized
3051 This is doubly true if WORD_REGISTER_OPERATIONS. In
3052 this case eliminate_regs has left non-paradoxical
3053 subregs for push_reload to see. Make sure it does
3054 by forcing the reload.
3056 ??? When is it right at this stage to have a subreg
3057 of a mem that is _not_ to be handled specially? IMO
3058 those should have been reduced to just a mem. */
3059 || ((MEM_P (operand
)
3061 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
))
3062 #ifndef WORD_REGISTER_OPERATIONS
3063 && (((GET_MODE_BITSIZE (GET_MODE (operand
))
3064 < BIGGEST_ALIGNMENT
)
3065 && (GET_MODE_SIZE (operand_mode
[i
])
3066 > GET_MODE_SIZE (GET_MODE (operand
))))
3068 #ifdef LOAD_EXTEND_OP
3069 || (GET_MODE_SIZE (operand_mode
[i
]) <= UNITS_PER_WORD
3070 && (GET_MODE_SIZE (GET_MODE (operand
))
3072 && (GET_MODE_SIZE (operand_mode
[i
])
3073 > GET_MODE_SIZE (GET_MODE (operand
)))
3074 && INTEGRAL_MODE_P (GET_MODE (operand
))
3075 && LOAD_EXTEND_OP (GET_MODE (operand
)) != UNKNOWN
)
3084 this_alternative
[i
] = NO_REGS
;
3085 this_alternative_win
[i
] = 0;
3086 this_alternative_match_win
[i
] = 0;
3087 this_alternative_offmemok
[i
] = 0;
3088 this_alternative_earlyclobber
[i
] = 0;
3089 this_alternative_matches
[i
] = -1;
3091 /* An empty constraint or empty alternative
3092 allows anything which matched the pattern. */
3093 if (*p
== 0 || *p
== ',')
3096 /* Scan this alternative's specs for this operand;
3097 set WIN if the operand fits any letter in this alternative.
3098 Otherwise, clear BADOP if this operand could
3099 fit some letter after reloads,
3100 or set WINREG if this operand could fit after reloads
3101 provided the constraint allows some registers. */
3104 switch ((c
= *p
, len
= CONSTRAINT_LEN (c
, p
)), c
)
3113 case '=': case '+': case '*':
3117 /* We only support one commutative marker, the first
3118 one. We already set commutative above. */
3130 /* Ignore rest of this alternative as far as
3131 reloading is concerned. */
3134 while (*p
&& *p
!= ',');
3138 case '0': case '1': case '2': case '3': case '4':
3139 case '5': case '6': case '7': case '8': case '9':
3140 m
= strtoul (p
, &end
, 10);
3144 this_alternative_matches
[i
] = m
;
3145 /* We are supposed to match a previous operand.
3146 If we do, we win if that one did.
3147 If we do not, count both of the operands as losers.
3148 (This is too conservative, since most of the time
3149 only a single reload insn will be needed to make
3150 the two operands win. As a result, this alternative
3151 may be rejected when it is actually desirable.) */
3152 if ((swapped
&& (m
!= commutative
|| i
!= commutative
+ 1))
3153 /* If we are matching as if two operands were swapped,
3154 also pretend that operands_match had been computed
3156 But if I is the second of those and C is the first,
3157 don't exchange them, because operands_match is valid
3158 only on one side of its diagonal. */
3160 [(m
== commutative
|| m
== commutative
+ 1)
3161 ? 2 * commutative
+ 1 - m
: m
]
3162 [(i
== commutative
|| i
== commutative
+ 1)
3163 ? 2 * commutative
+ 1 - i
: i
])
3164 : operands_match
[m
][i
])
3166 /* If we are matching a non-offsettable address where an
3167 offsettable address was expected, then we must reject
3168 this combination, because we can't reload it. */
3169 if (this_alternative_offmemok
[m
]
3170 && MEM_P (recog_data
.operand
[m
])
3171 && this_alternative
[m
] == NO_REGS
3172 && ! this_alternative_win
[m
])
3175 did_match
= this_alternative_win
[m
];
3179 /* Operands don't match. */
3182 /* Retroactively mark the operand we had to match
3183 as a loser, if it wasn't already. */
3184 if (this_alternative_win
[m
])
3186 this_alternative_win
[m
] = 0;
3187 if (this_alternative
[m
] == NO_REGS
)
3189 /* But count the pair only once in the total badness of
3190 this alternative, if the pair can be a dummy reload.
3191 The pointers in operand_loc are not swapped; swap
3192 them by hand if necessary. */
3193 if (swapped
&& i
== commutative
)
3194 loc1
= commutative
+ 1;
3195 else if (swapped
&& i
== commutative
+ 1)
3199 if (swapped
&& m
== commutative
)
3200 loc2
= commutative
+ 1;
3201 else if (swapped
&& m
== commutative
+ 1)
3206 = find_dummy_reload (recog_data
.operand
[i
],
3207 recog_data
.operand
[m
],
3208 recog_data
.operand_loc
[loc1
],
3209 recog_data
.operand_loc
[loc2
],
3210 operand_mode
[i
], operand_mode
[m
],
3211 this_alternative
[m
], -1,
3212 this_alternative_earlyclobber
[m
]);
3217 /* This can be fixed with reloads if the operand
3218 we are supposed to match can be fixed with reloads. */
3220 this_alternative
[i
] = this_alternative
[m
];
3222 /* If we have to reload this operand and some previous
3223 operand also had to match the same thing as this
3224 operand, we don't know how to do that. So reject this
3226 if (! did_match
|| force_reload
)
3227 for (j
= 0; j
< i
; j
++)
3228 if (this_alternative_matches
[j
]
3229 == this_alternative_matches
[i
])
3234 /* All necessary reloads for an address_operand
3235 were handled in find_reloads_address. */
3236 this_alternative
[i
] = base_reg_class (VOIDmode
, ADDRESS
,
3242 case TARGET_MEM_CONSTRAINT
:
3247 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3248 && reg_renumber
[REGNO (operand
)] < 0))
3250 if (CONST_POOL_OK_P (operand
))
3257 && ! address_reloaded
[i
]
3258 && (GET_CODE (XEXP (operand
, 0)) == PRE_DEC
3259 || GET_CODE (XEXP (operand
, 0)) == POST_DEC
))
3265 && ! address_reloaded
[i
]
3266 && (GET_CODE (XEXP (operand
, 0)) == PRE_INC
3267 || GET_CODE (XEXP (operand
, 0)) == POST_INC
))
3271 /* Memory operand whose address is not offsettable. */
3276 && ! (ind_levels
? offsettable_memref_p (operand
)
3277 : offsettable_nonstrict_memref_p (operand
))
3278 /* Certain mem addresses will become offsettable
3279 after they themselves are reloaded. This is important;
3280 we don't want our own handling of unoffsettables
3281 to override the handling of reg_equiv_address. */
3282 && !(REG_P (XEXP (operand
, 0))
3284 || reg_equiv_address
[REGNO (XEXP (operand
, 0))] != 0)))
3288 /* Memory operand whose address is offsettable. */
3292 if ((MEM_P (operand
)
3293 /* If IND_LEVELS, find_reloads_address won't reload a
3294 pseudo that didn't get a hard reg, so we have to
3295 reject that case. */
3296 && ((ind_levels
? offsettable_memref_p (operand
)
3297 : offsettable_nonstrict_memref_p (operand
))
3298 /* A reloaded address is offsettable because it is now
3299 just a simple register indirect. */
3300 || address_reloaded
[i
] == 1))
3302 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3303 && reg_renumber
[REGNO (operand
)] < 0
3304 /* If reg_equiv_address is nonzero, we will be
3305 loading it into a register; hence it will be
3306 offsettable, but we cannot say that reg_equiv_mem
3307 is offsettable without checking. */
3308 && ((reg_equiv_mem
[REGNO (operand
)] != 0
3309 && offsettable_memref_p (reg_equiv_mem
[REGNO (operand
)]))
3310 || (reg_equiv_address
[REGNO (operand
)] != 0))))
3312 if (CONST_POOL_OK_P (operand
)
3320 /* Output operand that is stored before the need for the
3321 input operands (and their index registers) is over. */
3322 earlyclobber
= 1, this_earlyclobber
= 1;
3327 if (GET_CODE (operand
) == CONST_DOUBLE
3328 || (GET_CODE (operand
) == CONST_VECTOR
3329 && (GET_MODE_CLASS (GET_MODE (operand
))
3330 == MODE_VECTOR_FLOAT
)))
3336 if (GET_CODE (operand
) == CONST_DOUBLE
3337 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand
, c
, p
))
3342 if (CONST_INT_P (operand
)
3343 || (GET_CODE (operand
) == CONST_DOUBLE
3344 && GET_MODE (operand
) == VOIDmode
))
3347 if (CONSTANT_P (operand
)
3348 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (operand
)))
3353 if (CONST_INT_P (operand
)
3354 || (GET_CODE (operand
) == CONST_DOUBLE
3355 && GET_MODE (operand
) == VOIDmode
))
3367 if (CONST_INT_P (operand
)
3368 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand
), c
, p
))
3379 /* A PLUS is never a valid operand, but reload can make
3380 it from a register when eliminating registers. */
3381 && GET_CODE (operand
) != PLUS
3382 /* A SCRATCH is not a valid operand. */
3383 && GET_CODE (operand
) != SCRATCH
3384 && (! CONSTANT_P (operand
)
3386 || LEGITIMATE_PIC_OPERAND_P (operand
))
3387 && (GENERAL_REGS
== ALL_REGS
3389 || (REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3390 && reg_renumber
[REGNO (operand
)] < 0)))
3392 /* Drop through into 'r' case. */
3396 = reg_class_subunion
[this_alternative
[i
]][(int) GENERAL_REGS
];
3400 if (REG_CLASS_FROM_CONSTRAINT (c
, p
) == NO_REGS
)
3402 #ifdef EXTRA_CONSTRAINT_STR
3403 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
3407 if (EXTRA_CONSTRAINT_STR (operand
, c
, p
))
3409 /* If the address was already reloaded,
3411 else if (MEM_P (operand
)
3412 && address_reloaded
[i
] == 1)
3414 /* Likewise if the address will be reloaded because
3415 reg_equiv_address is nonzero. For reg_equiv_mem
3416 we have to check. */
3417 else if (REG_P (operand
)
3418 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
3419 && reg_renumber
[REGNO (operand
)] < 0
3420 && ((reg_equiv_mem
[REGNO (operand
)] != 0
3421 && EXTRA_CONSTRAINT_STR (reg_equiv_mem
[REGNO (operand
)], c
, p
))
3422 || (reg_equiv_address
[REGNO (operand
)] != 0)))
3425 /* If we didn't already win, we can reload
3426 constants via force_const_mem, and other
3427 MEMs by reloading the address like for 'o'. */
3428 if (CONST_POOL_OK_P (operand
)
3435 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
3437 if (EXTRA_CONSTRAINT_STR (operand
, c
, p
))
3440 /* If we didn't already win, we can reload
3441 the address into a base register. */
3442 this_alternative
[i
] = base_reg_class (VOIDmode
,
3449 if (EXTRA_CONSTRAINT_STR (operand
, c
, p
))
3456 = (reg_class_subunion
3457 [this_alternative
[i
]]
3458 [(int) REG_CLASS_FROM_CONSTRAINT (c
, p
)]);
3460 if (GET_MODE (operand
) == BLKmode
)
3464 && reg_fits_class_p (operand
, this_alternative
[i
],
3465 offset
, GET_MODE (recog_data
.operand
[i
])))
3469 while ((p
+= len
), c
);
3473 /* If this operand could be handled with a reg,
3474 and some reg is allowed, then this operand can be handled. */
3475 if (winreg
&& this_alternative
[i
] != NO_REGS
3476 && (win
|| !class_only_fixed_regs
[this_alternative
[i
]]))
3479 /* Record which operands fit this alternative. */
3480 this_alternative_earlyclobber
[i
] = earlyclobber
;
3481 if (win
&& ! force_reload
)
3482 this_alternative_win
[i
] = 1;
3483 else if (did_match
&& ! force_reload
)
3484 this_alternative_match_win
[i
] = 1;
3487 int const_to_mem
= 0;
3489 this_alternative_offmemok
[i
] = offmemok
;
3493 /* Alternative loses if it has no regs for a reg operand. */
3495 && this_alternative
[i
] == NO_REGS
3496 && this_alternative_matches
[i
] < 0)
3499 /* If this is a constant that is reloaded into the desired
3500 class by copying it to memory first, count that as another
3501 reload. This is consistent with other code and is
3502 required to avoid choosing another alternative when
3503 the constant is moved into memory by this function on
3504 an early reload pass. Note that the test here is
3505 precisely the same as in the code below that calls
3507 if (CONST_POOL_OK_P (operand
)
3508 && ((targetm
.preferred_reload_class (operand
,
3509 this_alternative
[i
])
3511 || no_input_reloads
)
3512 && operand_mode
[i
] != VOIDmode
)
3515 if (this_alternative
[i
] != NO_REGS
)
3519 /* Alternative loses if it requires a type of reload not
3520 permitted for this insn. We can always reload SCRATCH
3521 and objects with a REG_UNUSED note. */
3522 if (GET_CODE (operand
) != SCRATCH
3523 && modified
[i
] != RELOAD_READ
&& no_output_reloads
3524 && ! find_reg_note (insn
, REG_UNUSED
, operand
))
3526 else if (modified
[i
] != RELOAD_WRITE
&& no_input_reloads
3530 /* If we can't reload this value at all, reject this
3531 alternative. Note that we could also lose due to
3532 LIMIT_RELOAD_CLASS, but we don't check that
3535 if (! CONSTANT_P (operand
) && this_alternative
[i
] != NO_REGS
)
3537 if (targetm
.preferred_reload_class (operand
, this_alternative
[i
])
3541 if (operand_type
[i
] == RELOAD_FOR_OUTPUT
3542 && (targetm
.preferred_output_reload_class (operand
,
3543 this_alternative
[i
])
3548 /* We prefer to reload pseudos over reloading other things,
3549 since such reloads may be able to be eliminated later.
3550 If we are reloading a SCRATCH, we won't be generating any
3551 insns, just using a register, so it is also preferred.
3552 So bump REJECT in other cases. Don't do this in the
3553 case where we are forcing a constant into memory and
3554 it will then win since we don't want to have a different
3555 alternative match then. */
3556 if (! (REG_P (operand)
3557 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3558 && GET_CODE (operand) != SCRATCH
3559 && ! (const_to_mem && constmemok))
3562 /* Input reloads can be inherited more often than output
3563 reloads can be removed, so penalize output reloads. */
3564 if (operand_type[i] != RELOAD_FOR_INPUT
3565 && GET_CODE (operand) != SCRATCH)
3569 /* If this operand is a pseudo register that didn't get a hard
3570 reg and this alternative accepts some register, see if the
3571 class that we want is a subset of the preferred class for this
3572 register. If not, but it intersects that class, use the
3573 preferred class instead. If it does not intersect the preferred
3574 class, show that usage of this alternative should be discouraged;
3575 it will be discouraged more still if the register is `preferred
3576 or nothing'. We do this because it increases the chance of
3577 reusing our spill register in a later insn and avoiding a pair
3578 of memory stores and loads.
3580 Don't bother with this if this alternative will accept this
3583 Don't do this for a multiword operand, since it is only a
3584 small win and has the risk of requiring more spill registers,
3585 which could cause a large loss.
3587 Don't do this if the preferred class has only one register
3588 because we might otherwise exhaust the class. */
3590 if (! win && ! did_match
3591 && this_alternative[i] != NO_REGS
3592 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3593 && reg_class_size[(int) preferred_class[i]] > 0
3594 && ! small_register_class_p (preferred_class[i]))
3596 if (! reg_class_subset_p (this_alternative[i],
3597 preferred_class[i]))
3599 /* Since we don't have a way of forming the intersection,
3600 we just do something special if the preferred class
3601 is a subset of the class we have; that's the most
3602 common case anyway. */
3603 if (reg_class_subset_p (preferred_class[i],
3604 this_alternative[i]))
3605 this_alternative[i] = preferred_class[i];
3607 reject += (2 + 2 * pref_or_nothing[i]);
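/* A minimal standalone sketch (not reload.c code) of the class-narrowing
   rule described above.  `class_mask', `class_subset_p' and
   `narrow_to_preferred' are hypothetical stand-ins for enum reg_class,
   reg_class_subset_p and the in-line logic; only the shape of the decision
   is meant to match.  */

#include <stdbool.h>

typedef unsigned int class_mask;	/* one bit per hard register */

static bool
class_subset_p (class_mask a, class_mask b)
{
  return (a & ~b) == 0;			/* every register of A is also in B */
}

/* Narrow WANTED toward PREFERRED: if PREFERRED is contained in WANTED, use
   PREFERRED; otherwise keep WANTED but raise the rejection cost, more so
   for a "preferred or nothing" register.  */
static class_mask
narrow_to_preferred (class_mask wanted, class_mask preferred,
		     bool pref_or_nothing, int *reject)
{
  if (! class_subset_p (wanted, preferred))
    {
      if (class_subset_p (preferred, wanted))
	return preferred;
      *reject += 2 + 2 * (pref_or_nothing ? 1 : 0);
    }
  return wanted;
}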
3612 /* Now see if any output operands that are marked "earlyclobber"
3613 in this alternative conflict with any input operands
3614 or any memory addresses. */
3616 for (i = 0; i < noperands; i++)
3617 if (this_alternative_earlyclobber[i]
3618 && (this_alternative_win[i] || this_alternative_match_win[i]))
3620 struct decomposition early_data;
3622 early_data = decompose (recog_data.operand[i]);
3624 gcc_assert (modified[i] != RELOAD_READ);
3626 if (this_alternative[i] == NO_REGS)
3628 this_alternative_earlyclobber[i] = 0;
3629 gcc_assert (this_insn_is_asm);
3630 error_for_asm (this_insn,
3631 "%<&%> constraint used with no register class");
3634 for (j = 0; j < noperands; j++)
3635 /* Is this an input operand or a memory ref? */
3636 if ((MEM_P (recog_data.operand[j])
3637 || modified[j] != RELOAD_WRITE)
3639 /* Ignore things like match_operator operands. */
3640 && !recog_data.is_operator[j]
3641 /* Don't count an input operand that is constrained to match
3642 the early clobber operand. */
3643 && ! (this_alternative_matches[j] == i
3644 && rtx_equal_p (recog_data.operand[i],
3645 recog_data.operand[j]))
3646 /* Is it altered by storing the earlyclobber operand? */
3647 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3650 /* If the output is in a non-empty few-regs class,
3651 it's costly to reload it, so reload the input instead. */
3652 if (small_register_class_p (this_alternative[i])
3653 && (REG_P (recog_data.operand[j])
3654 || GET_CODE (recog_data.operand[j]) == SUBREG))
3657 this_alternative_win[j] = 0;
3658 this_alternative_match_win[j] = 0;
3663 /* If an earlyclobber operand conflicts with something,
3664 it must be reloaded, so request this and count the cost. */
3668 this_alternative_win[i] = 0;
3669 this_alternative_match_win[j] = 0;
3670 for (j = 0; j < noperands; j++)
3671 if (this_alternative_matches[j] == i
3672 && this_alternative_match_win[j])
3674 this_alternative_win[j] = 0;
3675 this_alternative_match_win[j] = 0;
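/* A standalone sketch (not reload.c code) of the earlyclobber scan above:
   for every earlyclobber output that overlaps an input, force one of the
   two to be reloaded, preferring to reload the input when the output's
   class has few registers.  The struct, its fields and `overlaps' are
   illustrative stand-ins only.  */

#include <stdbool.h>

struct ec_operand
{
  bool earlyclobber;	/* output marked with '&' in this alternative */
  bool is_input;	/* value is read by the insn */
  bool win;		/* currently believed to need no reload */
  bool few_regs_class;	/* its class is small, so reloading it is costly */
};

static int
resolve_earlyclobbers (struct ec_operand *ops, int nops,
		       bool (*overlaps) (int i, int j))
{
  int extra_reloads = 0;

  for (int i = 0; i < nops; i++)
    {
      if (! ops[i].earlyclobber || ! ops[i].win)
	continue;
      for (int j = 0; j < nops; j++)
	if (j != i && ops[j].is_input && overlaps (i, j))
	  {
	    if (ops[i].few_regs_class)
	      ops[j].win = false;	/* cheaper to reload the input */
	    else
	      ops[i].win = false;	/* otherwise reload the output */
	    extra_reloads++;
	  }
    }
  return extra_reloads;
}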
3681 /* If one alternative accepts all the operands, no reload required,
3682 choose that alternative; don't consider the remaining ones. */
3685 /* Unswap these so that they are never swapped at `finish'. */
3686 if (commutative >= 0)
3688 recog_data.operand[commutative] = substed_operand[commutative];
3689 recog_data.operand[commutative + 1]
3690 = substed_operand[commutative + 1];
3692 for (i = 0; i < noperands; i++)
3694 goal_alternative_win[i] = this_alternative_win[i];
3695 goal_alternative_match_win[i] = this_alternative_match_win[i];
3696 goal_alternative[i] = this_alternative[i];
3697 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3698 goal_alternative_matches[i] = this_alternative_matches[i];
3699 goal_alternative_earlyclobber[i]
3700 = this_alternative_earlyclobber[i];
3702 goal_alternative_number = this_alternative_number;
3703 goal_alternative_swapped = swapped;
3704 goal_earlyclobber = this_earlyclobber;
3708 /* REJECT, set by the ! and ? constraint characters and when a register
3709 would be reloaded into a non-preferred class, discourages the use of
3710 this alternative for a reload goal. REJECT is incremented by six
3711 for each ? and two for each non-preferred class. */
3712 losers = losers * 6 + reject;
3714 /* If this alternative can be made to work by reloading,
3715 and it needs less reloading than the others checked so far,
3716 record it as the chosen goal for reloading. */
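/* A standalone sketch (not reload.c code) of that comparison: operands that
   still need a reload dominate the cost (weight 6) and ties are broken by
   the accumulated REJECT penalty.  The arrays here stand in for the running
   minimum that find_reloads keeps in `best'.  */

static int
pick_best_alternative (const int losers[], const int reject[], int n_alts)
{
  int best = -1, best_cost = 0;

  for (int a = 0; a < n_alts; a++)
    {
      int cost = losers[a] * 6 + reject[a];
      if (best < 0 || cost < best_cost)
	{
	  best = a;
	  best_cost = cost;
	}
    }
  return best;		/* index of the cheapest alternative, -1 if none */
}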
3721 for (i
= 0; i
< noperands
; i
++)
3723 goal_alternative
[i
] = this_alternative
[i
];
3724 goal_alternative_win
[i
] = this_alternative_win
[i
];
3725 goal_alternative_match_win
[i
]
3726 = this_alternative_match_win
[i
];
3727 goal_alternative_offmemok
[i
]
3728 = this_alternative_offmemok
[i
];
3729 goal_alternative_matches
[i
] = this_alternative_matches
[i
];
3730 goal_alternative_earlyclobber
[i
]
3731 = this_alternative_earlyclobber
[i
];
3733 goal_alternative_swapped
= swapped
;
3735 goal_alternative_number
= this_alternative_number
;
3736 goal_earlyclobber
= this_earlyclobber
;
3741 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3742 then we need to try each alternative twice,
3743 the second time matching those two operands
3744 as if we had exchanged them.
3745 To do this, really exchange them in operands.
3747 If we have just tried the alternatives the second time,
3748 return operands to normal and drop through. */
3750 if (commutative >= 0)
3755 enum reg_class tclass;
3758 recog_data.operand[commutative] = substed_operand[commutative + 1];
3759 recog_data.operand[commutative + 1] = substed_operand[commutative];
3760 /* Swap the duplicates too. */
3761 for (i = 0; i < recog_data.n_dups; i++)
3762 if (recog_data.dup_num[i] == commutative
3763 || recog_data.dup_num[i] == commutative + 1)
3764 *recog_data.dup_loc[i]
3765 = recog_data.operand[(int) recog_data.dup_num[i]];
3767 tclass = preferred_class[commutative];
3768 preferred_class[commutative] = preferred_class[commutative + 1];
3769 preferred_class[commutative + 1] = tclass;
3771 t = pref_or_nothing[commutative];
3772 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3773 pref_or_nothing[commutative + 1] = t;
3775 t = address_reloaded[commutative];
3776 address_reloaded[commutative] = address_reloaded[commutative + 1];
3777 address_reloaded[commutative + 1] = t;
3779 memcpy (constraints, recog_data.constraints,
3780 noperands * sizeof (const char *));
3785 recog_data.operand[commutative] = substed_operand[commutative];
3786 recog_data.operand[commutative + 1]
3787 = substed_operand[commutative + 1];
3788 /* Unswap the duplicates too. */
3789 for (i = 0; i < recog_data.n_dups; i++)
3790 if (recog_data.dup_num[i] == commutative
3791 || recog_data.dup_num[i] == commutative + 1)
3792 *recog_data.dup_loc[i]
3793 = recog_data.operand[(int) recog_data.dup_num[i]];
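/* A standalone sketch (not reload.c code) of the retry-with-swapped-operands
   idea above: score the alternatives once as written and once with the
   commutative pair exchanged, and make sure the caller never sees the
   exchanged order.  `score' and the int operand array are hypothetical.  */

static int
best_score_commutative (int (*score) (int *ops, int nops),
			int *ops, int nops, int commutative)
{
  int s = score (ops, nops);

  if (commutative >= 0)
    {
      int t = ops[commutative];
      ops[commutative] = ops[commutative + 1];
      ops[commutative + 1] = t;

      int s_swapped = score (ops, nops);

      /* Unswap so the operands are back to normal, as above.  */
      t = ops[commutative];
      ops[commutative] = ops[commutative + 1];
      ops[commutative + 1] = t;

      if (s_swapped < s)
	s = s_swapped;		/* fewer reloads with the operands exchanged */
    }
  return s;
}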
3797 /* The operands don't meet the constraints.
3798 goal_alternative describes the alternative
3799 that we could reach by reloading the fewest operands.
3800 Reload so as to fit it. */
3802 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3804 /* No alternative works with reloads?? */
3805 if (insn_code_number >= 0)
3806 fatal_insn ("unable to generate reloads for:", insn);
3807 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3808 /* Avoid further trouble with this insn. */
3809 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3814 /* Jump to `finish' from above if all operands are valid already.
3815 In that case, goal_alternative_win is all 1. */
3818 /* Right now, for any pair of operands I and J that are required to match,
3820 goal_alternative_matches[J] is I.
3821 Set up goal_alternative_matched as the inverse function:
3822 goal_alternative_matched[I] = J. */
3824 for (i = 0; i < noperands; i++)
3825 goal_alternative_matched[i] = -1;
3827 for (i = 0; i < noperands; i++)
3828 if (! goal_alternative_win[i]
3829 && goal_alternative_matches[i] >= 0)
3830 goal_alternative_matched[goal_alternative_matches[i]] = i;
3832 for (i = 0; i < noperands; i++)
3833 goal_alternative_win[i] |= goal_alternative_match_win[i];
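/* A standalone sketch (not reload.c code) of the inversion described above:
   MATCHES maps a later operand J to the earlier operand I it must equal;
   MATCHED is built as the inverse, with -1 meaning "no partner".  */

static void
invert_matches (const int matches[], int matched[], int nops)
{
  for (int i = 0; i < nops; i++)
    matched[i] = -1;

  for (int j = 0; j < nops; j++)
    if (matches[j] >= 0)
      matched[matches[j]] = j;
}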
3835 /* If the best alternative is with operands 1 and 2 swapped,
3836 consider them swapped before reporting the reloads. Update the
3837 operand numbers of any reloads already pushed. */
3839 if (goal_alternative_swapped
)
3843 tem
= substed_operand
[commutative
];
3844 substed_operand
[commutative
] = substed_operand
[commutative
+ 1];
3845 substed_operand
[commutative
+ 1] = tem
;
3846 tem
= recog_data
.operand
[commutative
];
3847 recog_data
.operand
[commutative
] = recog_data
.operand
[commutative
+ 1];
3848 recog_data
.operand
[commutative
+ 1] = tem
;
3849 tem
= *recog_data
.operand_loc
[commutative
];
3850 *recog_data
.operand_loc
[commutative
]
3851 = *recog_data
.operand_loc
[commutative
+ 1];
3852 *recog_data
.operand_loc
[commutative
+ 1] = tem
;
3854 for (i
= 0; i
< n_reloads
; i
++)
3856 if (rld
[i
].opnum
== commutative
)
3857 rld
[i
].opnum
= commutative
+ 1;
3858 else if (rld
[i
].opnum
== commutative
+ 1)
3859 rld
[i
].opnum
= commutative
;
3863 for (i
= 0; i
< noperands
; i
++)
3865 operand_reloadnum
[i
] = -1;
3867 /* If this is an earlyclobber operand, we need to widen the scope.
3868 The reload must remain valid from the start of the insn being
3869 reloaded until after the operand is stored into its destination.
3870 We approximate this with RELOAD_OTHER even though we know that we
3871 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3873 One special case that is worth checking is when we have an
3874 output that is earlyclobber but isn't used past the insn (typically
3875 a SCRATCH). In this case, we only need have the reload live
3876 through the insn itself, but not for any of our input or output
3878 But we must not accidentally narrow the scope of an existing
3879 RELOAD_OTHER reload - leave these alone.
3881 In any case, anything needed to address this operand can remain
3882 however they were previously categorized. */
3884 if (goal_alternative_earlyclobber
[i
] && operand_type
[i
] != RELOAD_OTHER
)
3886 = (find_reg_note (insn
, REG_UNUSED
, recog_data
.operand
[i
])
3887 ? RELOAD_FOR_INSN
: RELOAD_OTHER
);
3890 /* Any constants that aren't allowed and can't be reloaded
3891 into registers are here changed into memory references. */
3892 for (i
= 0; i
< noperands
; i
++)
3893 if (! goal_alternative_win
[i
])
3895 rtx op
= recog_data
.operand
[i
];
3896 rtx subreg
= NULL_RTX
;
3897 rtx plus
= NULL_RTX
;
3898 enum machine_mode mode
= operand_mode
[i
];
3900 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3901 push_reload so we have to let them pass here. */
3902 if (GET_CODE (op
) == SUBREG
)
3905 op
= SUBREG_REG (op
);
3906 mode
= GET_MODE (op
);
3909 if (GET_CODE (op
) == PLUS
)
3915 if (CONST_POOL_OK_P (op
)
3916 && ((targetm
.preferred_reload_class (op
, goal_alternative
[i
])
3918 || no_input_reloads
)
3919 && mode
!= VOIDmode
)
3921 int this_address_reloaded
;
3922 rtx tem
= force_const_mem (mode
, op
);
3924 /* If we stripped a SUBREG or a PLUS above add it back. */
3925 if (plus
!= NULL_RTX
)
3926 tem
= gen_rtx_PLUS (mode
, XEXP (plus
, 0), tem
);
3928 if (subreg
!= NULL_RTX
)
3929 tem
= gen_rtx_SUBREG (operand_mode
[i
], tem
, SUBREG_BYTE (subreg
));
3931 this_address_reloaded
= 0;
3932 substed_operand
[i
] = recog_data
.operand
[i
]
3933 = find_reloads_toplev (tem
, i
, address_type
[i
], ind_levels
,
3934 0, insn
, &this_address_reloaded
);
3936 /* If the alternative accepts constant pool refs directly
3937 there will be no reload needed at all. */
3938 if (plus
== NULL_RTX
3939 && subreg
== NULL_RTX
3940 && alternative_allows_const_pool_ref (this_address_reloaded
== 0
3941 ? substed_operand
[i
]
3943 recog_data
.constraints
[i
],
3944 goal_alternative_number
))
3945 goal_alternative_win
[i
] = 1;
3949 /* Record the values of the earlyclobber operands for the caller. */
3950 if (goal_earlyclobber
)
3951 for (i
= 0; i
< noperands
; i
++)
3952 if (goal_alternative_earlyclobber
[i
])
3953 reload_earlyclobbers
[n_earlyclobbers
++] = recog_data
.operand
[i
];
3955 /* Now record reloads for all the operands that need them. */
3956 for (i
= 0; i
< noperands
; i
++)
3957 if (! goal_alternative_win
[i
])
3959 /* Operands that match previous ones have already been handled. */
3960 if (goal_alternative_matches
[i
] >= 0)
3962 /* Handle an operand with a nonoffsettable address
3963 appearing where an offsettable address will do
3964 by reloading the address into a base register.
3966 ??? We can also do this when the operand is a register and
3967 reg_equiv_mem is not offsettable, but this is a bit tricky,
3968 so we don't bother with it. It may not be worth doing. */
3969 else if (goal_alternative_matched
[i
] == -1
3970 && goal_alternative_offmemok
[i
]
3971 && MEM_P (recog_data
.operand
[i
]))
3973 /* If the address to be reloaded is a VOIDmode constant,
3974 use the default address mode as mode of the reload register,
3975 as would have been done by find_reloads_address. */
3976 enum machine_mode address_mode
;
3977 address_mode
= GET_MODE (XEXP (recog_data
.operand
[i
], 0));
3978 if (address_mode
== VOIDmode
)
3980 addr_space_t as
= MEM_ADDR_SPACE (recog_data
.operand
[i
]);
3981 address_mode
= targetm
.addr_space
.address_mode (as
);
3984 operand_reloadnum
[i
]
3985 = push_reload (XEXP (recog_data
.operand
[i
], 0), NULL_RTX
,
3986 &XEXP (recog_data
.operand
[i
], 0), (rtx
*) 0,
3987 base_reg_class (VOIDmode
, MEM
, SCRATCH
),
3989 VOIDmode
, 0, 0, i
, RELOAD_FOR_INPUT
);
3990 rld
[operand_reloadnum
[i
]].inc
3991 = GET_MODE_SIZE (GET_MODE (recog_data
.operand
[i
]));
3993 /* If this operand is an output, we will have made any
3994 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3995 now we are treating part of the operand as an input, so
3996 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3998 if (modified
[i
] == RELOAD_WRITE
)
4000 for (j
= 0; j
< n_reloads
; j
++)
4002 if (rld
[j
].opnum
== i
)
4004 if (rld
[j
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
)
4005 rld
[j
].when_needed
= RELOAD_FOR_INPUT_ADDRESS
;
4006 else if (rld
[j
].when_needed
4007 == RELOAD_FOR_OUTADDR_ADDRESS
)
4008 rld
[j
].when_needed
= RELOAD_FOR_INPADDR_ADDRESS
;
4013 else if (goal_alternative_matched
[i
] == -1)
4015 operand_reloadnum
[i
]
4016 = push_reload ((modified
[i
] != RELOAD_WRITE
4017 ? recog_data
.operand
[i
] : 0),
4018 (modified
[i
] != RELOAD_READ
4019 ? recog_data
.operand
[i
] : 0),
4020 (modified
[i
] != RELOAD_WRITE
4021 ? recog_data
.operand_loc
[i
] : 0),
4022 (modified
[i
] != RELOAD_READ
4023 ? recog_data
.operand_loc
[i
] : 0),
4024 (enum reg_class
) goal_alternative
[i
],
4025 (modified
[i
] == RELOAD_WRITE
4026 ? VOIDmode
: operand_mode
[i
]),
4027 (modified
[i
] == RELOAD_READ
4028 ? VOIDmode
: operand_mode
[i
]),
4029 (insn_code_number
< 0 ? 0
4030 : insn_data
[insn_code_number
].operand
[i
].strict_low
),
4031 0, i
, operand_type
[i
]);
4033 /* In a matching pair of operands, one must be input only
4034 and the other must be output only.
4035 Pass the input operand as IN and the other as OUT. */
4036 else if (modified
[i
] == RELOAD_READ
4037 && modified
[goal_alternative_matched
[i
]] == RELOAD_WRITE
)
4039 operand_reloadnum
[i
]
4040 = push_reload (recog_data
.operand
[i
],
4041 recog_data
.operand
[goal_alternative_matched
[i
]],
4042 recog_data
.operand_loc
[i
],
4043 recog_data
.operand_loc
[goal_alternative_matched
[i
]],
4044 (enum reg_class
) goal_alternative
[i
],
4046 operand_mode
[goal_alternative_matched
[i
]],
4047 0, 0, i
, RELOAD_OTHER
);
4048 operand_reloadnum
[goal_alternative_matched
[i
]] = output_reloadnum
;
4050 else if (modified
[i
] == RELOAD_WRITE
4051 && modified
[goal_alternative_matched
[i
]] == RELOAD_READ
)
4053 operand_reloadnum
[goal_alternative_matched
[i
]]
4054 = push_reload (recog_data
.operand
[goal_alternative_matched
[i
]],
4055 recog_data
.operand
[i
],
4056 recog_data
.operand_loc
[goal_alternative_matched
[i
]],
4057 recog_data
.operand_loc
[i
],
4058 (enum reg_class
) goal_alternative
[i
],
4059 operand_mode
[goal_alternative_matched
[i
]],
4061 0, 0, i
, RELOAD_OTHER
);
4062 operand_reloadnum
[i
] = output_reloadnum
;
4066 gcc_assert (insn_code_number
< 0);
4067 error_for_asm (insn
, "inconsistent operand constraints "
4069 /* Avoid further trouble with this insn. */
4070 PATTERN (insn
) = gen_rtx_USE (VOIDmode
, const0_rtx
);
4075 else if (goal_alternative_matched
[i
] < 0
4076 && goal_alternative_matches
[i
] < 0
4077 && address_operand_reloaded
[i
] != 1
4080 /* For each non-matching operand that's a MEM or a pseudo-register
4081 that didn't get a hard register, make an optional reload.
4082 This may get done even if the insn needs no reloads otherwise. */
4084 rtx operand
= recog_data
.operand
[i
];
4086 while (GET_CODE (operand
) == SUBREG
)
4087 operand
= SUBREG_REG (operand
);
4088 if ((MEM_P (operand
)
4090 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
))
4091 /* If this is only for an output, the optional reload would not
4092 actually cause us to use a register now, just note that
4093 something is stored here. */
4094 && (goal_alternative
[i
] != NO_REGS
4095 || modified
[i
] == RELOAD_WRITE
)
4096 && ! no_input_reloads
4097 /* An optional output reload might allow to delete INSN later.
4098 We mustn't make in-out reloads on insns that are not permitted
4100 If this is an asm, we can't delete it; we must not even call
4101 push_reload for an optional output reload in this case,
4102 because we can't be sure that the constraint allows a register,
4103 and push_reload verifies the constraints for asms. */
4104 && (modified
[i
] == RELOAD_READ
4105 || (! no_output_reloads
&& ! this_insn_is_asm
)))
4106 operand_reloadnum
[i
]
4107 = push_reload ((modified
[i
] != RELOAD_WRITE
4108 ? recog_data
.operand
[i
] : 0),
4109 (modified
[i
] != RELOAD_READ
4110 ? recog_data
.operand
[i
] : 0),
4111 (modified
[i
] != RELOAD_WRITE
4112 ? recog_data
.operand_loc
[i
] : 0),
4113 (modified
[i
] != RELOAD_READ
4114 ? recog_data
.operand_loc
[i
] : 0),
4115 (enum reg_class
) goal_alternative
[i
],
4116 (modified
[i
] == RELOAD_WRITE
4117 ? VOIDmode
: operand_mode
[i
]),
4118 (modified
[i
] == RELOAD_READ
4119 ? VOIDmode
: operand_mode
[i
]),
4120 (insn_code_number
< 0 ? 0
4121 : insn_data
[insn_code_number
].operand
[i
].strict_low
),
4122 1, i
, operand_type
[i
]);
4123 /* If a memory reference remains (either as a MEM or a pseudo that
4124 did not get a hard register), yet we can't make an optional
4125 reload, check if this is actually a pseudo register reference;
4126 we then need to emit a USE and/or a CLOBBER so that reload
4127 inheritance will do the right thing. */
4131 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
4132 && reg_renumber
[REGNO (operand
)] < 0)))
4134 operand
= *recog_data
.operand_loc
[i
];
4136 while (GET_CODE (operand
) == SUBREG
)
4137 operand
= SUBREG_REG (operand
);
4138 if (REG_P (operand
))
4140 if (modified
[i
] != RELOAD_WRITE
)
4141 /* We mark the USE with QImode so that we recognize
4142 it as one that can be safely deleted at the end
4144 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode
, operand
),
4146 if (modified
[i
] != RELOAD_READ
)
4147 emit_insn_after (gen_clobber (operand
), insn
);
4151 else if (goal_alternative_matches
[i
] >= 0
4152 && goal_alternative_win
[goal_alternative_matches
[i
]]
4153 && modified
[i
] == RELOAD_READ
4154 && modified
[goal_alternative_matches
[i
]] == RELOAD_WRITE
4155 && ! no_input_reloads
&& ! no_output_reloads
4158 /* Similarly, make an optional reload for a pair of matching
4159 objects that are in MEM or a pseudo that didn't get a hard reg. */
4161 rtx operand
= recog_data
.operand
[i
];
4163 while (GET_CODE (operand
) == SUBREG
)
4164 operand
= SUBREG_REG (operand
);
4165 if ((MEM_P (operand
)
4167 && REGNO (operand
) >= FIRST_PSEUDO_REGISTER
))
4168 && (goal_alternative
[goal_alternative_matches
[i
]] != NO_REGS
))
4169 operand_reloadnum
[i
] = operand_reloadnum
[goal_alternative_matches
[i
]]
4170 = push_reload (recog_data
.operand
[goal_alternative_matches
[i
]],
4171 recog_data
.operand
[i
],
4172 recog_data
.operand_loc
[goal_alternative_matches
[i
]],
4173 recog_data
.operand_loc
[i
],
4174 (enum reg_class
) goal_alternative
[goal_alternative_matches
[i
]],
4175 operand_mode
[goal_alternative_matches
[i
]],
4177 0, 1, goal_alternative_matches
[i
], RELOAD_OTHER
);
4180 /* Perform whatever substitutions on the operands we are supposed
4181 to make due to commutativity or replacement of registers
4182 with equivalent constants or memory slots. */
4184 for (i
= 0; i
< noperands
; i
++)
4186 /* We only do this on the last pass through reload, because it is
4187 possible for some data (like reg_equiv_address) to be changed during
4188 later passes. Moreover, we lose the opportunity to get a useful
4189 reload_{in,out}_reg when we do these replacements. */
4193 rtx substitution
= substed_operand
[i
];
4195 *recog_data
.operand_loc
[i
] = substitution
;
4197 /* If we're replacing an operand with a LABEL_REF, we need to
4198 make sure that there's a REG_LABEL_OPERAND note attached to
4199 this instruction. */
4200 if (GET_CODE (substitution
) == LABEL_REF
4201 && !find_reg_note (insn
, REG_LABEL_OPERAND
,
4202 XEXP (substitution
, 0))
4203 /* For a JUMP_P, if it was a branch target it must have
4204 already been recorded as such. */
4206 || !label_is_jump_target_p (XEXP (substitution
, 0),
4208 add_reg_note (insn
, REG_LABEL_OPERAND
, XEXP (substitution
, 0));
4211 retval
|= (substed_operand
[i
] != *recog_data
.operand_loc
[i
]);
4214 /* If this insn pattern contains any MATCH_DUP's, make sure that
4215 they will be substituted if the operands they match are substituted.
4216 Also do now any substitutions we already did on the operands.
4218 Don't do this if we aren't making replacements because we might be
4219 propagating things allocated by frame pointer elimination into places
4220 it doesn't expect. */
4222 if (insn_code_number
>= 0 && replace
)
4223 for (i
= insn_data
[insn_code_number
].n_dups
- 1; i
>= 0; i
--)
4225 int opno
= recog_data
.dup_num
[i
];
4226 *recog_data
.dup_loc
[i
] = *recog_data
.operand_loc
[opno
];
4227 dup_replacements (recog_data
.dup_loc
[i
], recog_data
.operand_loc
[opno
]);
4231 /* This loses because reloading of prior insns can invalidate the equivalence
4232 (or at least find_equiv_reg isn't smart enough to find it any more),
4233 causing this insn to need more reload regs than it needed before.
4234 It may be too late to make the reload regs available.
4235 Now this optimization is done safely in choose_reload_regs. */
4237 /* For each reload of a reg into some other class of reg,
4238 search for an existing equivalent reg (same value now) in the right class.
4239 We can use it as long as we don't need to change its contents. */
4240 for (i = 0; i < n_reloads; i++)
4241 if (rld[i].reg_rtx == 0
4243 && REG_P (rld[i].in)
4247 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4248 static_reload_reg_p, 0, rld[i].inmode);
4249 /* Prevent generation of insn to load the value
4250 because the one we found already has the value. */
4252 rld[i].in = rld[i].reg_rtx;
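/* A standalone sketch (not reload.c code) of the reuse described above: if
   some register already holds the value to be loaded, adopt it as the
   reload register and treat it as the input so no load insn is generated.
   `lookup_equiv' is a hypothetical stand-in for find_equiv_reg, and the int
   register numbers stand in for rtx values.  */

struct mini_reload
{
  int in;	/* register whose value must be loaded, -1 if none */
  int reg;	/* chosen reload register, -1 if not chosen yet */
};

static void
reuse_equivalent_reg (struct mini_reload *r, int (*lookup_equiv) (int value))
{
  if (r->reg < 0 && r->in >= 0)
    {
      int equiv = lookup_equiv (r->in);
      if (equiv >= 0)
	{
	  r->reg = equiv;	/* this register already holds the value */
	  r->in = equiv;	/* so no load insn is needed */
	}
    }
}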
4256 /* If we detected error and replaced asm instruction by USE, forget about the
4258 if (GET_CODE (PATTERN (insn
)) == USE
4259 && CONST_INT_P (XEXP (PATTERN (insn
), 0)))
4262 /* Perhaps an output reload can be combined with another
4263 to reduce needs by one. */
4264 if (!goal_earlyclobber
)
4267 /* If we have a pair of reloads for parts of an address, they are reloading
4268 the same object, the operands themselves were not reloaded, and they
4269 are for two operands that are supposed to match, merge the reloads and
4270 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4272 for (i
= 0; i
< n_reloads
; i
++)
4276 for (j
= i
+ 1; j
< n_reloads
; j
++)
4277 if ((rld
[i
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
4278 || rld
[i
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
4279 || rld
[i
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
4280 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
4281 && (rld
[j
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
4282 || rld
[j
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
4283 || rld
[j
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
4284 || rld
[j
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
4285 && rtx_equal_p (rld
[i
].in
, rld
[j
].in
)
4286 && (operand_reloadnum
[rld
[i
].opnum
] < 0
4287 || rld
[operand_reloadnum
[rld
[i
].opnum
]].optional
)
4288 && (operand_reloadnum
[rld
[j
].opnum
] < 0
4289 || rld
[operand_reloadnum
[rld
[j
].opnum
]].optional
)
4290 && (goal_alternative_matches
[rld
[i
].opnum
] == rld
[j
].opnum
4291 || (goal_alternative_matches
[rld
[j
].opnum
]
4294 for (k
= 0; k
< n_replacements
; k
++)
4295 if (replacements
[k
].what
== j
)
4296 replacements
[k
].what
= i
;
4298 if (rld
[i
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
4299 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
4300 rld
[i
].when_needed
= RELOAD_FOR_OPADDR_ADDR
;
4302 rld
[i
].when_needed
= RELOAD_FOR_OPERAND_ADDRESS
;
4307 /* Scan all the reloads and update their type.
4308 If a reload is for the address of an operand and we didn't reload
4309 that operand, change the type. Similarly, change the operand number
4310 of a reload when two operands match. If a reload is optional, treat it
4311 as though the operand isn't reloaded.
4313 ??? This latter case is somewhat odd because if we do the optional
4314 reload, it means the object is hanging around. Thus we need only
4315 do the address reload if the optional reload was NOT done.
4317 Change secondary reloads to be the address type of their operand, not
4320 If an operand's reload is now RELOAD_OTHER, change any
4321 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4322 RELOAD_FOR_OTHER_ADDRESS. */
4324 for (i
= 0; i
< n_reloads
; i
++)
4326 if (rld
[i
].secondary_p
4327 && rld
[i
].when_needed
== operand_type
[rld
[i
].opnum
])
4328 rld
[i
].when_needed
= address_type
[rld
[i
].opnum
];
4330 if ((rld
[i
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
4331 || rld
[i
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
4332 || rld
[i
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
4333 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
4334 && (operand_reloadnum
[rld
[i
].opnum
] < 0
4335 || rld
[operand_reloadnum
[rld
[i
].opnum
]].optional
))
4337 /* If we have a secondary reload to go along with this reload,
4338 change its type to RELOAD_FOR_OPADDR_ADDR. */
4340 if ((rld
[i
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
4341 || rld
[i
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
)
4342 && rld
[i
].secondary_in_reload
!= -1)
4344 int secondary_in_reload
= rld
[i
].secondary_in_reload
;
4346 rld
[secondary_in_reload
].when_needed
= RELOAD_FOR_OPADDR_ADDR
;
4348 /* If there's a tertiary reload we have to change it also. */
4349 if (secondary_in_reload
> 0
4350 && rld
[secondary_in_reload
].secondary_in_reload
!= -1)
4351 rld
[rld
[secondary_in_reload
].secondary_in_reload
].when_needed
4352 = RELOAD_FOR_OPADDR_ADDR
;
4355 if ((rld
[i
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
4356 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
4357 && rld
[i
].secondary_out_reload
!= -1)
4359 int secondary_out_reload
= rld
[i
].secondary_out_reload
;
4361 rld
[secondary_out_reload
].when_needed
= RELOAD_FOR_OPADDR_ADDR
;
4363 /* If there's a tertiary reload we have to change it also. */
4364 if (secondary_out_reload
4365 && rld
[secondary_out_reload
].secondary_out_reload
!= -1)
4366 rld
[rld
[secondary_out_reload
].secondary_out_reload
].when_needed
4367 = RELOAD_FOR_OPADDR_ADDR
;
4370 if (rld
[i
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
4371 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
)
4372 rld
[i
].when_needed
= RELOAD_FOR_OPADDR_ADDR
;
4374 rld
[i
].when_needed
= RELOAD_FOR_OPERAND_ADDRESS
;
4377 if ((rld
[i
].when_needed
== RELOAD_FOR_INPUT_ADDRESS
4378 || rld
[i
].when_needed
== RELOAD_FOR_INPADDR_ADDRESS
)
4379 && operand_reloadnum
[rld
[i
].opnum
] >= 0
4380 && (rld
[operand_reloadnum
[rld
[i
].opnum
]].when_needed
4382 rld
[i
].when_needed
= RELOAD_FOR_OTHER_ADDRESS
;
4384 if (goal_alternative_matches
[rld
[i
].opnum
] >= 0)
4385 rld
[i
].opnum
= goal_alternative_matches
[rld
[i
].opnum
];
4388 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4389 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4390 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4392 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4393 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4394 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4395 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4396 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4397 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4398 This is complicated by the fact that a single operand can have more
4399 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4400 choose_reload_regs without affecting code quality, and cases that
4401 actually fail are extremely rare, so it turns out to be better to fix
4402 the problem here by not generating cases that choose_reload_regs will
4404 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4405 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4407 We can reduce the register pressure by exploiting that a
4408 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4409 does not conflict with any of them, if it is only used for the first of
4410 the RELOAD_FOR_X_ADDRESS reloads. */
4412 int first_op_addr_num = -2;
4413 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4414 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4415 int need_change = 0;
4416 /* We use last_op_addr_reload and the contents of the above arrays
4417 first as flags - -2 means no instance encountered, -1 means exactly
4418 one instance encountered.
4419 If more than one instance has been encountered, we store the reload
4420 number of the first reload of the kind in question; reload numbers
4421 are known to be non-negative. */
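/* A standalone sketch (not reload.c code) of that encoding, written as a
   forward scan for clarity; the code below gets the same result with a
   backward scan and a ++/compare trick.  KIND_OF and KIND are
   hypothetical.  */

static int
encode_first_instance (const int kind_of[], int n, int kind)
{
  int state = -2;	/* no instance seen yet */
  int first = -1;

  for (int i = 0; i < n; i++)
    if (kind_of[i] == kind)
      {
	if (first < 0)
	  first = i;			/* remember the first occurrence */
	state = (state == -2 ? -1 : first);
      }
  return state;		/* -2: none, -1: exactly one, else first index */
}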
4422 for (i
= 0; i
< noperands
; i
++)
4423 first_inpaddr_num
[i
] = first_outpaddr_num
[i
] = -2;
4424 for (i
= n_reloads
- 1; i
>= 0; i
--)
4426 switch (rld
[i
].when_needed
)
4428 case RELOAD_FOR_OPERAND_ADDRESS
:
4429 if (++first_op_addr_num
>= 0)
4431 first_op_addr_num
= i
;
4435 case RELOAD_FOR_INPUT_ADDRESS
:
4436 if (++first_inpaddr_num
[rld
[i
].opnum
] >= 0)
4438 first_inpaddr_num
[rld
[i
].opnum
] = i
;
4442 case RELOAD_FOR_OUTPUT_ADDRESS
:
4443 if (++first_outpaddr_num
[rld
[i
].opnum
] >= 0)
4445 first_outpaddr_num
[rld
[i
].opnum
] = i
;
4456 for (i
= 0; i
< n_reloads
; i
++)
4459 enum reload_type type
;
4461 switch (rld
[i
].when_needed
)
4463 case RELOAD_FOR_OPADDR_ADDR
:
4464 first_num
= first_op_addr_num
;
4465 type
= RELOAD_FOR_OPERAND_ADDRESS
;
4467 case RELOAD_FOR_INPADDR_ADDRESS
:
4468 first_num
= first_inpaddr_num
[rld
[i
].opnum
];
4469 type
= RELOAD_FOR_INPUT_ADDRESS
;
4471 case RELOAD_FOR_OUTADDR_ADDRESS
:
4472 first_num
= first_outpaddr_num
[rld
[i
].opnum
];
4473 type
= RELOAD_FOR_OUTPUT_ADDRESS
;
4480 else if (i
> first_num
)
4481 rld
[i
].when_needed
= type
;
4484 /* Check if the only TYPE reload that uses reload I is
4485 reload FIRST_NUM. */
4486 for (j
= n_reloads
- 1; j
> first_num
; j
--)
4488 if (rld
[j
].when_needed
== type
4489 && (rld
[i
].secondary_p
4490 ? rld
[j
].secondary_in_reload
== i
4491 : reg_mentioned_p (rld
[i
].in
, rld
[j
].in
)))
4493 rld
[i
].when_needed
= type
;
4502 /* See if we have any reloads that are now allowed to be merged
4503 because we've changed when the reload is needed to
4504 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4505 check for the most common cases. */
4507 for (i
= 0; i
< n_reloads
; i
++)
4508 if (rld
[i
].in
!= 0 && rld
[i
].out
== 0
4509 && (rld
[i
].when_needed
== RELOAD_FOR_OPERAND_ADDRESS
4510 || rld
[i
].when_needed
== RELOAD_FOR_OPADDR_ADDR
4511 || rld
[i
].when_needed
== RELOAD_FOR_OTHER_ADDRESS
))
4512 for (j
= 0; j
< n_reloads
; j
++)
4513 if (i
!= j
&& rld
[j
].in
!= 0 && rld
[j
].out
== 0
4514 && rld
[j
].when_needed
== rld
[i
].when_needed
4515 && MATCHES (rld
[i
].in
, rld
[j
].in
)
4516 && rld
[i
].rclass
== rld
[j
].rclass
4517 && !rld
[i
].nocombine
&& !rld
[j
].nocombine
4518 && rld
[i
].reg_rtx
== rld
[j
].reg_rtx
)
4520 rld
[i
].opnum
= MIN (rld
[i
].opnum
, rld
[j
].opnum
);
4521 transfer_replacements (i
, j
);
4526 /* If we made any reloads for addresses, see if they violate a
4527 "no input reloads" requirement for this insn. But loads that we
4528 do after the insn (such as for output addresses) are fine. */
4529 if (no_input_reloads
)
4530 for (i
= 0; i
< n_reloads
; i
++)
4531 gcc_assert (rld
[i
].in
== 0
4532 || rld
[i
].when_needed
== RELOAD_FOR_OUTADDR_ADDRESS
4533 || rld
[i
].when_needed
== RELOAD_FOR_OUTPUT_ADDRESS
);
4536 /* Compute reload_mode and reload_nregs. */
4537 for (i = 0; i < n_reloads; i++)
4540 = (rld[i].inmode == VOIDmode
4541 || (GET_MODE_SIZE (rld[i].outmode)
4542 > GET_MODE_SIZE (rld[i].inmode)))
4543 ? rld[i].outmode : rld[i].inmode;
4545 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
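/* A standalone sketch (not reload.c code) of the computation above: take
   the wider of the input and output modes (an unset input mode counts as
   narrowest) and ask how many registers of the class that mode needs.
   `mode_size' and `class_max_nregs' are hypothetical stand-ins for
   GET_MODE_SIZE and CLASS_MAX_NREGS, with 0 playing the role of
   VOIDmode.  */

struct mini_rld
{
  int rclass;
  int inmode, outmode;
  int mode, nregs;
};

static void
set_reload_mode_and_nregs (struct mini_rld *r,
			   int (*mode_size) (int mode),
			   int (*class_max_nregs) (int rclass, int mode))
{
  r->mode = (r->inmode == 0
	     || mode_size (r->outmode) > mode_size (r->inmode))
	    ? r->outmode : r->inmode;
  r->nregs = class_max_nregs (r->rclass, r->mode);
}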
4548 /* Special case a simple move with an input reload and a
4549 destination of a hard reg, if the hard reg is ok, use it. */
4550 for (i = 0; i < n_reloads; i++)
4551 if (rld[i].when_needed == RELOAD_FOR_INPUT
4552 && GET_CODE (PATTERN (insn)) == SET
4553 && REG_P (SET_DEST (PATTERN (insn)))
4554 && (SET_SRC (PATTERN (insn)) == rld[i].in
4555 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4556 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4558 rtx dest = SET_DEST (PATTERN (insn));
4559 unsigned int regno = REGNO (dest);
4561 if (regno < FIRST_PSEUDO_REGISTER
4562 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4563 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4565 int nr = hard_regno_nregs[regno][rld[i].mode];
4568 for (nri = 1; nri < nr; nri++)
4569 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4573 rld[i].reg_rtx = dest;
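/* A standalone sketch (not reload.c code) of the check above: a value may
   span several consecutive hard registers, and the destination is usable
   only if every one of them belongs to the reload's class.  The 64-bit mask
   is a hypothetical stand-in for reg_class_contents and assumes at most 64
   hard registers.  */

#include <stdbool.h>

static bool
hard_regs_fit_class (unsigned long long class_regs,
		     unsigned int regno, unsigned int nregs)
{
  for (unsigned int nri = 0; nri < nregs; nri++)
    if (! (class_regs & (1ULL << (regno + nri))))
      return false;
  return true;
}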
4580 /* Return true if alternative number ALTNUM in constraint-string
4581 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4582 MEM gives the reference if it didn't need any reloads, otherwise it
4586 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4587 const char *constraint, int altnum)
4591 /* Skip alternatives before the one requested. */
4594 while (*constraint++ != ',');
4597 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4598 If one of them is present, this alternative accepts the result of
4599 passing a constant-pool reference through find_reloads_toplev.
4601 The same is true of extra memory constraints if the address
4602 was reloaded into a register. However, the target may elect
4603 to disallow the original constant address, forcing it to be
4604 reloaded into a register instead. */
4605 for (; (c = *constraint) && c != ',' && c != '#';
4606 constraint += CONSTRAINT_LEN (c, constraint))
4608 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4610 #ifdef EXTRA_CONSTRAINT_STR
4611 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4612 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
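/* A standalone sketch (not reload.c code) of scanning one alternative of a
   comma-separated constraint string, in the spirit of the function above.
   Unlike the real code it assumes single-character constraints, so it
   ignores CONSTRAINT_LEN and the target's extra memory constraints.  */

#include <stdbool.h>

static bool
alt_has_memory_letter (const char *constraint, int altnum)
{
  /* Skip alternatives before the one requested.  */
  while (altnum-- > 0)
    {
      while (*constraint && *constraint != ',')
	constraint++;
      if (*constraint == ',')
	constraint++;
    }

  /* Scan the requested alternative for a memory letter.  */
  for (char c = *constraint; c && c != ',' && c != '#'; c = *++constraint)
    if (c == 'm' || c == 'o')
      return true;
  return false;
}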
4619 /* Scan X for memory references and scan the addresses for reloading.
4620 Also checks for references to "constant" regs that we want to eliminate
4621 and replaces them with the values they stand for.
4622 We may alter X destructively if it contains a reference to such.
4623 If X is just a constant reg, we return the equivalent value
4626 IND_LEVELS says how many levels of indirect addressing this machine
4629 OPNUM and TYPE identify the purpose of the reload.
4631 IS_SET_DEST is true if X is the destination of a SET, which is not
4632 appropriate to be replaced by a constant.
4634 INSN, if nonzero, is the insn in which we do the reload. It is used
4635 to determine if we may generate output reloads, and where to put USEs
4636 for pseudos that we have to replace with stack slots.
4638 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4639 result of find_reloads_address. */
4642 find_reloads_toplev (rtx x
, int opnum
, enum reload_type type
,
4643 int ind_levels
, int is_set_dest
, rtx insn
,
4644 int *address_reloaded
)
4646 RTX_CODE code
= GET_CODE (x
);
4648 const char *fmt
= GET_RTX_FORMAT (code
);
4654 /* This code is duplicated for speed in find_reloads. */
4655 int regno
= REGNO (x
);
4656 if (reg_equiv_constant
[regno
] != 0 && !is_set_dest
)
4657 x
= reg_equiv_constant
[regno
];
4659 /* This creates (subreg (mem...)) which would cause an unnecessary
4660 reload of the mem. */
4661 else if (reg_equiv_mem
[regno
] != 0)
4662 x
= reg_equiv_mem
[regno
];
4664 else if (reg_equiv_memory_loc
[regno
]
4665 && (reg_equiv_address
[regno
] != 0 || num_not_at_initial_offset
))
4667 rtx mem
= make_memloc (x
, regno
);
4668 if (reg_equiv_address
[regno
]
4669 || ! rtx_equal_p (mem
, reg_equiv_mem
[regno
]))
4671 /* If this is not a toplevel operand, find_reloads doesn't see
4672 this substitution. We have to emit a USE of the pseudo so
4673 that delete_output_reload can see it. */
4674 if (replace_reloads
&& recog_data
.operand
[opnum
] != x
)
4675 /* We mark the USE with QImode so that we recognize it
4676 as one that can be safely deleted at the end of
4678 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode
, x
), insn
),
4681 i
= find_reloads_address (GET_MODE (x
), &x
, XEXP (x
, 0), &XEXP (x
, 0),
4682 opnum
, type
, ind_levels
, insn
);
4683 if (!rtx_equal_p (x
, mem
))
4684 push_reg_equiv_alt_mem (regno
, x
);
4685 if (address_reloaded
)
4686 *address_reloaded
= i
;
4695 i
= find_reloads_address (GET_MODE (x
), &tem
, XEXP (x
, 0), &XEXP (x
, 0),
4696 opnum
, type
, ind_levels
, insn
);
4697 if (address_reloaded
)
4698 *address_reloaded
= i
;
4703 if (code
== SUBREG
&& REG_P (SUBREG_REG (x
)))
4705 /* Check for SUBREG containing a REG that's equivalent to a
4706 constant. If the constant has a known value, truncate it
4707 right now. Similarly if we are extracting a single-word of a
4708 multi-word constant. If the constant is symbolic, allow it
4709 to be substituted normally. push_reload will strip the
4710 subreg later. The constant must not be VOIDmode, because we
4711 will lose the mode of the register (this should never happen
4712 because one of the cases above should handle it). */
4714 int regno
= REGNO (SUBREG_REG (x
));
4717 if (regno
>= FIRST_PSEUDO_REGISTER
4718 && reg_renumber
[regno
] < 0
4719 && reg_equiv_constant
[regno
] != 0)
4722 simplify_gen_subreg (GET_MODE (x
), reg_equiv_constant
[regno
],
4723 GET_MODE (SUBREG_REG (x
)), SUBREG_BYTE (x
));
4725 if (CONSTANT_P (tem
) && !LEGITIMATE_CONSTANT_P (tem
))
4727 tem
= force_const_mem (GET_MODE (x
), tem
);
4728 i
= find_reloads_address (GET_MODE (tem
), &tem
, XEXP (tem
, 0),
4729 &XEXP (tem
, 0), opnum
, type
,
4731 if (address_reloaded
)
4732 *address_reloaded
= i
;
4737 /* If the subreg contains a reg that will be converted to a mem,
4738 convert the subreg to a narrower memref now.
4739 Otherwise, we would get (subreg (mem ...) ...),
4740 which would force reload of the mem.
4742 We also need to do this if there is an equivalent MEM that is
4743 not offsettable. In that case, alter_subreg would produce an
4744 invalid address on big-endian machines.
4746 For machines that extend byte loads, we must not reload using
4747 a wider mode if we have a paradoxical SUBREG. find_reloads will
4748 force a reload in that case. So we should not do anything here. */
4750 if (regno
>= FIRST_PSEUDO_REGISTER
4751 #ifdef LOAD_EXTEND_OP
4752 && (GET_MODE_SIZE (GET_MODE (x
))
4753 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
4755 && (reg_equiv_address
[regno
] != 0
4756 || (reg_equiv_mem
[regno
] != 0
4757 && (! strict_memory_address_addr_space_p
4758 (GET_MODE (x
), XEXP (reg_equiv_mem
[regno
], 0),
4759 MEM_ADDR_SPACE (reg_equiv_mem
[regno
]))
4760 || ! offsettable_memref_p (reg_equiv_mem
[regno
])
4761 || num_not_at_initial_offset
))))
4762 x
= find_reloads_subreg_address (x
, 1, opnum
, type
, ind_levels
,
4766 for (copied
= 0, i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4770 rtx new_part
= find_reloads_toplev (XEXP (x
, i
), opnum
, type
,
4771 ind_levels
, is_set_dest
, insn
,
4773 /* If we have replaced a reg with it's equivalent memory loc -
4774 that can still be handled here e.g. if it's in a paradoxical
4775 subreg - we must make the change in a copy, rather than using
4776 a destructive change. This way, find_reloads can still elect
4777 not to do the change. */
4778 if (new_part
!= XEXP (x
, i
) && ! CONSTANT_P (new_part
) && ! copied
)
4780 x
= shallow_copy_rtx (x
);
4783 XEXP (x
, i
) = new_part
;
4789 /* Return a mem ref for the memory equivalent of reg REGNO.
4790 This mem ref is not shared with anything. */
4793 make_memloc (rtx ad
, int regno
)
4795 /* We must rerun eliminate_regs, in case the elimination
4796 offsets have changed. */
4798 = XEXP (eliminate_regs (reg_equiv_memory_loc
[regno
], VOIDmode
, NULL_RTX
),
4801 /* If TEM might contain a pseudo, we must copy it to avoid
4802 modifying it when we do the substitution for the reload. */
4803 if (rtx_varies_p (tem
, 0))
4804 tem
= copy_rtx (tem
);
4806 tem
= replace_equiv_address_nv (reg_equiv_memory_loc
[regno
], tem
);
4807 tem
= adjust_address_nv (tem
, GET_MODE (ad
), 0);
4809 /* Copy the result if it's still the same as the equivalence, to avoid
4810 modifying it when we do the substitution for the reload. */
4811 if (tem
== reg_equiv_memory_loc
[regno
])
4812 tem
= copy_rtx (tem
);
4816 /* Returns true if AD could be turned into a valid memory reference
4817 to mode MODE in address space AS by reloading the part pointed to
4818 by PART into a register. */
4821 maybe_memory_address_addr_space_p (enum machine_mode mode
, rtx ad
,
4822 addr_space_t as
, rtx
*part
)
4826 rtx reg
= gen_rtx_REG (GET_MODE (tem
), max_reg_num ());
4829 retv
= memory_address_addr_space_p (mode
, ad
, as
);
4835 /* Record all reloads needed for handling memory address AD
4836 which appears in *LOC in a memory reference to mode MODE
4837 which itself is found in location *MEMREFLOC.
4838 Note that we take shortcuts assuming that no multi-reg machine mode
4839 occurs as part of an address.
4841 OPNUM and TYPE specify the purpose of this reload.
4843 IND_LEVELS says how many levels of indirect addressing this machine
4846 INSN, if nonzero, is the insn in which we do the reload. It is used
4847 to determine if we may generate output reloads, and where to put USEs
4848 for pseudos that we have to replace with stack slots.
4850 Value is one if this address is reloaded or replaced as a whole; it is
4851 zero if the top level of this address was not reloaded or replaced, and
4852 it is -1 if it may or may not have been reloaded or replaced.
4854 Note that there is no verification that the address will be valid after
4855 this routine does its work. Instead, we rely on the fact that the address
4856 was valid when reload started. So we need only undo things that reload
4857 could have broken. These are wrong register types, pseudos not allocated
4858 to a hard register, and frame pointer elimination. */
4861 find_reloads_address (enum machine_mode mode
, rtx
*memrefloc
, rtx ad
,
4862 rtx
*loc
, int opnum
, enum reload_type type
,
4863 int ind_levels
, rtx insn
)
4865 addr_space_t as
= memrefloc
? MEM_ADDR_SPACE (*memrefloc
)
4866 : ADDR_SPACE_GENERIC
;
4868 int removed_and
= 0;
4872 /* If the address is a register, see if it is a legitimate address and
4873 reload if not. We first handle the cases where we need not reload
4874 or where we must reload in a non-standard way. */
4880 if (reg_equiv_constant
[regno
] != 0)
4882 find_reloads_address_part (reg_equiv_constant
[regno
], loc
,
4883 base_reg_class (mode
, MEM
, SCRATCH
),
4884 GET_MODE (ad
), opnum
, type
, ind_levels
);
4888 tem
= reg_equiv_memory_loc
[regno
];
4891 if (reg_equiv_address
[regno
] != 0 || num_not_at_initial_offset
)
4893 tem
= make_memloc (ad
, regno
);
4894 if (! strict_memory_address_addr_space_p (GET_MODE (tem
),
4896 MEM_ADDR_SPACE (tem
)))
4900 find_reloads_address (GET_MODE (tem
), &tem
, XEXP (tem
, 0),
4901 &XEXP (tem
, 0), opnum
,
4902 ADDR_TYPE (type
), ind_levels
, insn
);
4903 if (!rtx_equal_p (tem
, orig
))
4904 push_reg_equiv_alt_mem (regno
, tem
);
4906 /* We can avoid a reload if the register's equivalent memory
4907 expression is valid as an indirect memory address.
4908 But not all addresses are valid in a mem used as an indirect
4909 address: only reg or reg+constant. */
4912 && strict_memory_address_addr_space_p (mode
, tem
, as
)
4913 && (REG_P (XEXP (tem
, 0))
4914 || (GET_CODE (XEXP (tem
, 0)) == PLUS
4915 && REG_P (XEXP (XEXP (tem
, 0), 0))
4916 && CONSTANT_P (XEXP (XEXP (tem
, 0), 1)))))
4918 /* TEM is not the same as what we'll be replacing the
4919 pseudo with after reload, put a USE in front of INSN
4920 in the final reload pass. */
4922 && num_not_at_initial_offset
4923 && ! rtx_equal_p (tem
, reg_equiv_mem
[regno
]))
4926 /* We mark the USE with QImode so that we
4927 recognize it as one that can be safely
4928 deleted at the end of reload. */
4929 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode
, ad
),
4932 /* This doesn't really count as replacing the address
4933 as a whole, since it is still a memory access. */
4941 /* The only remaining case where we can avoid a reload is if this is a
4942 hard register that is valid as a base register and which is not the
4943 subject of a CLOBBER in this insn. */
4945 else if (regno
< FIRST_PSEUDO_REGISTER
4946 && regno_ok_for_base_p (regno
, mode
, MEM
, SCRATCH
)
4947 && ! regno_clobbered_p (regno
, this_insn
, mode
, 0))
4950 /* If we do not have one of the cases above, we must do the reload. */
4951 push_reload (ad
, NULL_RTX
, loc
, (rtx
*) 0, base_reg_class (mode
, MEM
, SCRATCH
),
4952 GET_MODE (ad
), VOIDmode
, 0, 0, opnum
, type
);
4956 if (strict_memory_address_addr_space_p (mode
, ad
, as
))
4958 /* The address appears valid, so reloads are not needed.
4959 But the address may contain an eliminable register.
4960 This can happen because a machine with indirect addressing
4961 may consider a pseudo register by itself a valid address even when
4962 it has failed to get a hard reg.
4963 So do a tree-walk to find and eliminate all such regs. */
4965 /* But first quickly dispose of a common case. */
4966 if (GET_CODE (ad
) == PLUS
4967 && CONST_INT_P (XEXP (ad
, 1))
4968 && REG_P (XEXP (ad
, 0))
4969 && reg_equiv_constant
[REGNO (XEXP (ad
, 0))] == 0)
4972 subst_reg_equivs_changed
= 0;
4973 *loc
= subst_reg_equivs (ad
, insn
);
4975 if (! subst_reg_equivs_changed
)
4978 /* Check result for validity after substitution. */
4979 if (strict_memory_address_addr_space_p (mode
, ad
, as
))
4983 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4986 if (memrefloc
&& ADDR_SPACE_GENERIC_P (as
))
4988 LEGITIMIZE_RELOAD_ADDRESS (ad
, GET_MODE (*memrefloc
), opnum
, type
,
4993 *memrefloc
= copy_rtx (*memrefloc
);
4994 XEXP (*memrefloc
, 0) = ad
;
4995 move_replacements (&ad
, &XEXP (*memrefloc
, 0));
5001 /* The address is not valid. We have to figure out why. First see if
5002 we have an outer AND and remove it if so. Then analyze what's inside. */
5004 if (GET_CODE (ad
) == AND
)
5007 loc
= &XEXP (ad
, 0);
5011 /* One possibility for why the address is invalid is that it is itself
5012 a MEM. This can happen when the frame pointer is being eliminated, a
5013 pseudo is not allocated to a hard register, and the offset between the
5014 frame and stack pointers is not its initial value. In that case the
5015 pseudo will have been replaced by a MEM referring to the
5019 /* First ensure that the address in this MEM is valid. Then, unless
5020 indirect addresses are valid, reload the MEM into a register. */
5022 find_reloads_address (GET_MODE (ad
), &tem
, XEXP (ad
, 0), &XEXP (ad
, 0),
5023 opnum
, ADDR_TYPE (type
),
5024 ind_levels
== 0 ? 0 : ind_levels
- 1, insn
);
5026 /* If tem was changed, then we must create a new memory reference to
5027 hold it and store it back into memrefloc. */
5028 if (tem
!= ad
&& memrefloc
)
5030 *memrefloc
= copy_rtx (*memrefloc
);
5031 copy_replacements (tem
, XEXP (*memrefloc
, 0));
5032 loc
= &XEXP (*memrefloc
, 0);
5034 loc
= &XEXP (*loc
, 0);
5037 /* Check similar cases as for indirect addresses as above except
5038 that we can allow pseudos and a MEM since they should have been
5039 taken care of above. */
5042 || (GET_CODE (XEXP (tem
, 0)) == SYMBOL_REF
&& ! indirect_symref_ok
)
5043 || MEM_P (XEXP (tem
, 0))
5044 || ! (REG_P (XEXP (tem
, 0))
5045 || (GET_CODE (XEXP (tem
, 0)) == PLUS
5046 && REG_P (XEXP (XEXP (tem
, 0), 0))
5047 && CONST_INT_P (XEXP (XEXP (tem
, 0), 1)))))
5049 /* Must use TEM here, not AD, since it is the one that will
5050 have any subexpressions reloaded, if needed. */
5051 push_reload (tem
, NULL_RTX
, loc
, (rtx
*) 0,
5052 base_reg_class (mode
, MEM
, SCRATCH
), GET_MODE (tem
),
5055 return ! removed_and
;
5061 /* If we have address of a stack slot but it's not valid because the
5062 displacement is too large, compute the sum in a register.
5063 Handle all base registers here, not just fp/ap/sp, because on some
5064 targets (namely SH) we can also get too large displacements from
5065 big-endian corrections. */
5066 else if (GET_CODE (ad
) == PLUS
5067 && REG_P (XEXP (ad
, 0))
5068 && REGNO (XEXP (ad
, 0)) < FIRST_PSEUDO_REGISTER
5069 && CONST_INT_P (XEXP (ad
, 1))
5070 && regno_ok_for_base_p (REGNO (XEXP (ad
, 0)), mode
, PLUS
,
5074 /* Unshare the MEM rtx so we can safely alter it. */
5077 *memrefloc
= copy_rtx (*memrefloc
);
5078 loc
= &XEXP (*memrefloc
, 0);
5080 loc
= &XEXP (*loc
, 0);
5083 if (double_reg_address_ok
)
5085 /* Unshare the sum as well. */
5086 *loc
= ad
= copy_rtx (ad
);
5088 /* Reload the displacement into an index reg.
5089 We assume the frame pointer or arg pointer is a base reg. */
5090 find_reloads_address_part (XEXP (ad
, 1), &XEXP (ad
, 1),
5091 INDEX_REG_CLASS
, GET_MODE (ad
), opnum
,
5097 /* If the sum of two regs is not necessarily valid,
5098 reload the sum into a base reg.
5099 That will at least work. */
5100 find_reloads_address_part (ad
, loc
,
5101 base_reg_class (mode
, MEM
, SCRATCH
),
5102 GET_MODE (ad
), opnum
, type
, ind_levels
);
5104 return ! removed_and
;
5107 /* If we have an indexed stack slot, there are three possible reasons why
5108 it might be invalid: The index might need to be reloaded, the address
5109 might have been made by frame pointer elimination and hence have a
5110 constant out of range, or both reasons might apply.
5112 We can easily check for an index needing reload, but even if that is the
5113 case, we might also have an invalid constant. To avoid making the
5114 conservative assumption and requiring two reloads, we see if this address
5115 is valid when not interpreted strictly. If it is, the only problem is
5116 that the index needs a reload and find_reloads_address_1 will take care
5119 Handle all base registers here, not just fp/ap/sp, because on some
5120 targets (namely SPARC) we can also get invalid addresses from preventive
5121 subreg big-endian corrections made by find_reloads_toplev. We
5122 can also get expressions involving LO_SUM (rather than PLUS) from
5123 find_reloads_subreg_address.
5125 If we decide to do something, it must be that `double_reg_address_ok'
5126 is true. We generate a reload of the base register + constant and
5127 rework the sum so that the reload register will be added to the index.
5128 This is safe because we know the address isn't shared.
5130 We check for the base register as both the first and second operand of
5131 the innermost PLUS and/or LO_SUM. */
5133 for (op_index
= 0; op_index
< 2; ++op_index
)
5135 rtx operand
, addend
;
5136 enum rtx_code inner_code
;
5138 if (GET_CODE (ad
) != PLUS
)
5141 inner_code
= GET_CODE (XEXP (ad
, 0));
5142 if (!(GET_CODE (ad
) == PLUS
5143 && CONST_INT_P (XEXP (ad
, 1))
5144 && (inner_code
== PLUS
|| inner_code
== LO_SUM
)))
5147 operand
= XEXP (XEXP (ad
, 0), op_index
);
5148 if (!REG_P (operand
) || REGNO (operand
) >= FIRST_PSEUDO_REGISTER
)
5151 addend
= XEXP (XEXP (ad
, 0), 1 - op_index
);
5153 if ((regno_ok_for_base_p (REGNO (operand
), mode
, inner_code
,
5155 || operand
== frame_pointer_rtx
5156 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5157 || operand
== hard_frame_pointer_rtx
5159 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5160 || operand
== arg_pointer_rtx
5162 || operand
== stack_pointer_rtx
)
5163 && ! maybe_memory_address_addr_space_p
5164 (mode
, ad
, as
, &XEXP (XEXP (ad
, 0), 1 - op_index
)))
5169 offset_reg
= plus_constant (operand
, INTVAL (XEXP (ad
, 1)));
5171 /* Form the adjusted address. */
5172 if (GET_CODE (XEXP (ad
, 0)) == PLUS
)
5173 ad
= gen_rtx_PLUS (GET_MODE (ad
),
5174 op_index
== 0 ? offset_reg
: addend
,
5175 op_index
== 0 ? addend
: offset_reg
);
5177 ad
= gen_rtx_LO_SUM (GET_MODE (ad
),
5178 op_index
== 0 ? offset_reg
: addend
,
5179 op_index
== 0 ? addend
: offset_reg
);
5182 cls
= base_reg_class (mode
, MEM
, GET_CODE (addend
));
5183 find_reloads_address_part (XEXP (ad
, op_index
),
5184 &XEXP (ad
, op_index
), cls
,
5185 GET_MODE (ad
), opnum
, type
, ind_levels
);
5186 find_reloads_address_1 (mode
,
5187 XEXP (ad
, 1 - op_index
), 1, GET_CODE (ad
),
5188 GET_CODE (XEXP (ad
, op_index
)),
5189 &XEXP (ad
, 1 - op_index
), opnum
,
5196 /* See if address becomes valid when an eliminable register
5197 in a sum is replaced. */
5200 if (GET_CODE (ad
) == PLUS
)
5201 tem
= subst_indexed_address (ad
);
5202 if (tem
!= ad
&& strict_memory_address_addr_space_p (mode
, tem
, as
))
5204 /* Ok, we win that way. Replace any additional eliminable
5207 subst_reg_equivs_changed
= 0;
5208 tem
= subst_reg_equivs (tem
, insn
);
5210 /* Make sure that didn't make the address invalid again. */
5212 if (! subst_reg_equivs_changed
5213 || strict_memory_address_addr_space_p (mode
, tem
, as
))
5220 /* If constants aren't valid addresses, reload the constant address
5222 if (CONSTANT_P (ad
) && ! strict_memory_address_addr_space_p (mode
, ad
, as
))
5224 enum machine_mode address_mode
= GET_MODE (ad
);
5225 if (address_mode
== VOIDmode
)
5226 address_mode
= targetm
.addr_space
.address_mode (as
);
5228 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5229 Unshare it so we can safely alter it. */
5230 if (memrefloc
&& GET_CODE (ad
) == SYMBOL_REF
5231 && CONSTANT_POOL_ADDRESS_P (ad
))
5233 *memrefloc
= copy_rtx (*memrefloc
);
5234 loc
= &XEXP (*memrefloc
, 0);
5236 loc
= &XEXP (*loc
, 0);
5239 find_reloads_address_part (ad
, loc
, base_reg_class (mode
, MEM
, SCRATCH
),
5240 address_mode
, opnum
, type
, ind_levels
);
5241 return ! removed_and
;
5244 return find_reloads_address_1 (mode
, ad
, 0, MEM
, SCRATCH
, loc
, opnum
, type
,
/* Find all pseudo regs appearing in AD
   that are eliminable in favor of equivalent values
   and do not have hard regs; replace them by their equivalents.
   INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
   front of it for pseudos that we have to replace with stack slots.  */

static rtx
subst_reg_equivs (rtx ad, rtx insn)
{
  RTX_CODE code = GET_CODE (ad);

      int regno = REGNO (ad);

      if (reg_equiv_constant[regno] != 0)
	{
	  subst_reg_equivs_changed = 1;
	  return reg_equiv_constant[regno];
	}
      if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
	{
	  rtx mem = make_memloc (ad, regno);
	  if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
	    {
	      subst_reg_equivs_changed = 1;
	      /* We mark the USE with QImode so that we recognize it
		 as one that can be safely deleted at the end of
		 reload.  */
	      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
			QImode);
	    }
	}

      /* Quickly dispose of a common case.  */
      if (XEXP (ad, 0) == frame_pointer_rtx
	  && CONST_INT_P (XEXP (ad, 1)))
	return ad;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
  return ad;
}
/* Compute the sum of X and Y, making canonicalizations assumed in an
   address, namely: sum constant integers, surround the sum of two
   constants with a CONST, put the constant as the second operand, and
   group the constant on the outermost sum.

   This routine assumes both inputs are already in canonical form.  */

static rtx
form_sum (enum machine_mode mode, rtx x, rtx y)
{
  rtx tem;

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);

  if (CONST_INT_P (x))
    return plus_constant (y, INTVAL (x));
  else if (CONST_INT_P (y))
    return plus_constant (x, INTVAL (y));
  else if (CONSTANT_P (x))
    tem = x, x = y, y = tem;

  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
    return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));

  /* Note that if the operands of Y are specified in the opposite
     order in the recursive calls below, infinite recursion will occur.  */
  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
    return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));

  /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
     constant will have been placed second.  */
  if (CONSTANT_P (x) && CONSTANT_P (y))
    {
      if (GET_CODE (x) == CONST)
	x = XEXP (x, 0);
      if (GET_CODE (y) == CONST)
	y = XEXP (y, 0);

      return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
    }

  return gen_rtx_PLUS (mode, x, y);
}
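
/* An illustrative example of the canonicalization (the pseudo-register
   number 65 and the symbol name below are hypothetical, not taken from
   the surrounding code):

     form_sum (SImode, (plus:SI (reg 65) (const_int 4)), (const_int 8))

   takes the CONST_INT_P (y) branch, hands the whole sum to plus_constant,
   and returns (plus:SI (reg 65) (const_int 12)) -- the two integers are
   folded and the constant stays as the second, outermost operand.
   Likewise form_sum (SImode, (symbol_ref "s"), (reg 65)) first swaps the
   operands so the constant ends up second, giving
   (plus:SI (reg 65) (symbol_ref "s")).  */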
/* If ADDR is a sum containing a pseudo register that should be
   replaced with a constant (from reg_equiv_constant),
   return the result of doing so, and also apply the associative
   law so that the result is more likely to be a valid address.
   (But it is not guaranteed to be one.)

   Note that at most one register is replaced, even if more are
   replaceable.  Also, we try to put the result into a canonical form
   so it is more likely to be a valid address.

   In all other cases, return ADDR.  */

static rtx
subst_indexed_address (rtx addr)
{
  rtx op0 = 0, op1 = 0, op2 = 0;
  rtx tem;
  int regno;

  if (GET_CODE (addr) == PLUS)
    {
      /* Try to find a register to replace.  */
      op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
      if (REG_P (op0)
	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[regno] < 0
	  && reg_equiv_constant[regno] != 0)
	op0 = reg_equiv_constant[regno];
      else if (REG_P (op1)
	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
	       && reg_renumber[regno] < 0
	       && reg_equiv_constant[regno] != 0)
	op1 = reg_equiv_constant[regno];
      else if (GET_CODE (op0) == PLUS
	       && (tem = subst_indexed_address (op0)) != op0)
	op0 = tem;
      else if (GET_CODE (op1) == PLUS
	       && (tem = subst_indexed_address (op1)) != op1)
	op1 = tem;
      else
	return addr;

      /* Pick out up to three things to add.  */
      if (GET_CODE (op1) == PLUS)
	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
      else if (GET_CODE (op0) == PLUS)
	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

      /* Compute the sum.  */
      if (op2 != 0)
	op1 = form_sum (GET_MODE (addr), op1, op2);
      if (op1 != 0)
	op0 = form_sum (GET_MODE (addr), op0, op1);

      return op0;
    }

  return addr;
}
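
/* An illustrative example (the register numbers are hypothetical): if ADDR is
   (plus:SI (reg 117) (reg 66)), pseudo 117 got no hard register
   (reg_renumber[117] < 0), and reg_equiv_constant[117] is (const_int 64),
   then op0 is replaced by the constant and form_sum re-canonicalizes the
   result to (plus:SI (reg 66) (const_int 64)), which is more likely to be
   a valid address than the unsubstituted sum.  */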
/* Update the REG_INC notes for an insn.  It updates all REG_INC
   notes for the instruction which refer to REGNO to refer
   to the reload number.

   INSN is the insn for which any REG_INC notes need updating.

   REGNO is the register number which has been reloaded.

   RELOADNUM is the reload number.  */

static void
update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
		       int reloadnum ATTRIBUTE_UNUSED)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (int) REGNO (XEXP (link, 0)) == regno)
      push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
/* Record the pseudo registers we must reload into hard registers in a
   subexpression of a would-be memory address, X referring to a value
   in mode MODE.  (This function is not called if the address we find
   is strictly valid.)

   CONTEXT = 1 means we are considering regs as index regs,
   = 0 means we are considering them as base regs.
   OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
   or an autoincrement.

   If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
   is the code of the index part of the address.  Otherwise, pass SCRATCH
   for this argument.

   OPNUM and TYPE specify the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads.

   We return nonzero if X, as a whole, is reloaded or replaced.  */

/* Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.
   Also, this is not fully machine-customizable; it works for machines
   such as VAXen and 68000's and 32000's, but other possible machines
   could have addressing modes that this does not handle right.
   If you add push_reload calls here, you need to make sure gen_reload
   handles those cases gracefully.  */

static int
find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
			enum rtx_code outer_code, enum rtx_code index_code,
			rtx *loc, int opnum, enum reload_type type,
			int ind_levels, rtx insn)
{
#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX)	\
  ((CONTEXT) == 0						\
   ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX)		\
   : REGNO_OK_FOR_INDEX_P (REGNO))

  enum reg_class context_reg_class;
  RTX_CODE code = GET_CODE (x);

  if (context == 1)
    context_reg_class = INDEX_REG_CLASS;
  else
    context_reg_class = base_reg_class (mode, outer_code, index_code);
5500 rtx orig_op0
= XEXP (x
, 0);
5501 rtx orig_op1
= XEXP (x
, 1);
5502 RTX_CODE code0
= GET_CODE (orig_op0
);
5503 RTX_CODE code1
= GET_CODE (orig_op1
);
5507 if (GET_CODE (op0
) == SUBREG
)
5509 op0
= SUBREG_REG (op0
);
5510 code0
= GET_CODE (op0
);
5511 if (code0
== REG
&& REGNO (op0
) < FIRST_PSEUDO_REGISTER
)
5512 op0
= gen_rtx_REG (word_mode
,
5514 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0
)),
5515 GET_MODE (SUBREG_REG (orig_op0
)),
5516 SUBREG_BYTE (orig_op0
),
5517 GET_MODE (orig_op0
))));
5520 if (GET_CODE (op1
) == SUBREG
)
5522 op1
= SUBREG_REG (op1
);
5523 code1
= GET_CODE (op1
);
5524 if (code1
== REG
&& REGNO (op1
) < FIRST_PSEUDO_REGISTER
)
5525 /* ??? Why is this given op1's mode and above for
5526 ??? op0 SUBREGs we use word_mode? */
5527 op1
= gen_rtx_REG (GET_MODE (op1
),
5529 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1
)),
5530 GET_MODE (SUBREG_REG (orig_op1
)),
5531 SUBREG_BYTE (orig_op1
),
5532 GET_MODE (orig_op1
))));
5534 /* Plus in the index register may be created only as a result of
5535 register rematerialization for expression like &localvar*4. Reload it.
5536 It may be possible to combine the displacement on the outer level,
5537 but it is probably not worthwhile to do so. */
5540 find_reloads_address (GET_MODE (x
), loc
, XEXP (x
, 0), &XEXP (x
, 0),
5541 opnum
, ADDR_TYPE (type
), ind_levels
, insn
);
5542 push_reload (*loc
, NULL_RTX
, loc
, (rtx
*) 0,
5544 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5548 if (code0
== MULT
|| code0
== SIGN_EXTEND
|| code0
== TRUNCATE
5549 || code0
== ZERO_EXTEND
|| code1
== MEM
)
5551 find_reloads_address_1 (mode
, orig_op0
, 1, PLUS
, SCRATCH
,
5552 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5554 find_reloads_address_1 (mode
, orig_op1
, 0, PLUS
, code0
,
5555 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5559 else if (code1
== MULT
|| code1
== SIGN_EXTEND
|| code1
== TRUNCATE
5560 || code1
== ZERO_EXTEND
|| code0
== MEM
)
5562 find_reloads_address_1 (mode
, orig_op0
, 0, PLUS
, code1
,
5563 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5565 find_reloads_address_1 (mode
, orig_op1
, 1, PLUS
, SCRATCH
,
5566 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5570 else if (code0
== CONST_INT
|| code0
== CONST
5571 || code0
== SYMBOL_REF
|| code0
== LABEL_REF
)
5572 find_reloads_address_1 (mode
, orig_op1
, 0, PLUS
, code0
,
5573 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5576 else if (code1
== CONST_INT
|| code1
== CONST
5577 || code1
== SYMBOL_REF
|| code1
== LABEL_REF
)
5578 find_reloads_address_1 (mode
, orig_op0
, 0, PLUS
, code1
,
5579 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5582 else if (code0
== REG
&& code1
== REG
)
5584 if (REGNO_OK_FOR_INDEX_P (REGNO (op1
))
5585 && regno_ok_for_base_p (REGNO (op0
), mode
, PLUS
, REG
))
5587 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0
))
5588 && regno_ok_for_base_p (REGNO (op1
), mode
, PLUS
, REG
))
5590 else if (regno_ok_for_base_p (REGNO (op0
), mode
, PLUS
, REG
))
5591 find_reloads_address_1 (mode
, orig_op1
, 1, PLUS
, SCRATCH
,
5592 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5594 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1
)))
5595 find_reloads_address_1 (mode
, orig_op0
, 0, PLUS
, REG
,
5596 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5598 else if (regno_ok_for_base_p (REGNO (op1
), mode
, PLUS
, REG
))
5599 find_reloads_address_1 (mode
, orig_op0
, 1, PLUS
, SCRATCH
,
5600 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5602 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0
)))
5603 find_reloads_address_1 (mode
, orig_op1
, 0, PLUS
, REG
,
5604 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5608 find_reloads_address_1 (mode
, orig_op0
, 0, PLUS
, REG
,
5609 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5611 find_reloads_address_1 (mode
, orig_op1
, 1, PLUS
, SCRATCH
,
5612 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5617 else if (code0
== REG
)
5619 find_reloads_address_1 (mode
, orig_op0
, 1, PLUS
, SCRATCH
,
5620 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5622 find_reloads_address_1 (mode
, orig_op1
, 0, PLUS
, REG
,
5623 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5627 else if (code1
== REG
)
5629 find_reloads_address_1 (mode
, orig_op1
, 1, PLUS
, SCRATCH
,
5630 &XEXP (x
, 1), opnum
, type
, ind_levels
,
5632 find_reloads_address_1 (mode
, orig_op0
, 0, PLUS
, REG
,
5633 &XEXP (x
, 0), opnum
, type
, ind_levels
,
5643 rtx op0
= XEXP (x
, 0);
5644 rtx op1
= XEXP (x
, 1);
5645 enum rtx_code index_code
;
5649 if (GET_CODE (op1
) != PLUS
&& GET_CODE (op1
) != MINUS
)
5652 /* Currently, we only support {PRE,POST}_MODIFY constructs
5653 where a base register is {inc,dec}remented by the contents
5654 of another register or by a constant value. Thus, these
5655 operands must match. */
5656 gcc_assert (op0
== XEXP (op1
, 0));
5658 /* Require index register (or constant). Let's just handle the
5659 register case in the meantime... If the target allows
5660 auto-modify by a constant then we could try replacing a pseudo
5661 register with its equivalent constant where applicable.
5663 We also handle the case where the register was eliminated
5664 resulting in a PLUS subexpression.
5666 If we later decide to reload the whole PRE_MODIFY or
5667 POST_MODIFY, inc_for_reload might clobber the reload register
5668 before reading the index. The index register might therefore
5669 need to live longer than a TYPE reload normally would, so be
5670 conservative and class it as RELOAD_OTHER. */
5671 if ((REG_P (XEXP (op1
, 1))
5672 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1
, 1))))
5673 || GET_CODE (XEXP (op1
, 1)) == PLUS
)
5674 find_reloads_address_1 (mode
, XEXP (op1
, 1), 1, code
, SCRATCH
,
5675 &XEXP (op1
, 1), opnum
, RELOAD_OTHER
,
5678 gcc_assert (REG_P (XEXP (op1
, 0)));
5680 regno
= REGNO (XEXP (op1
, 0));
5681 index_code
= GET_CODE (XEXP (op1
, 1));
5683 /* A register that is incremented cannot be constant! */
5684 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
5685 || reg_equiv_constant
[regno
] == 0);
5687 /* Handle a register that is equivalent to a memory location
5688 which cannot be addressed directly. */
5689 if (reg_equiv_memory_loc
[regno
] != 0
5690 && (reg_equiv_address
[regno
] != 0
5691 || num_not_at_initial_offset
))
5693 rtx tem
= make_memloc (XEXP (x
, 0), regno
);
5695 if (reg_equiv_address
[regno
]
5696 || ! rtx_equal_p (tem
, reg_equiv_mem
[regno
]))
5700 /* First reload the memory location's address.
5701 We can't use ADDR_TYPE (type) here, because we need to
5702 write back the value after reading it, hence we actually
5703 need two registers. */
5704 find_reloads_address (GET_MODE (tem
), &tem
, XEXP (tem
, 0),
5705 &XEXP (tem
, 0), opnum
,
5709 if (!rtx_equal_p (tem
, orig
))
5710 push_reg_equiv_alt_mem (regno
, tem
);
5712 /* Then reload the memory location into a base
5714 reloadnum
= push_reload (tem
, tem
, &XEXP (x
, 0),
5716 base_reg_class (mode
, code
,
5718 GET_MODE (x
), GET_MODE (x
), 0,
5719 0, opnum
, RELOAD_OTHER
);
5721 update_auto_inc_notes (this_insn
, regno
, reloadnum
);
5726 if (reg_renumber
[regno
] >= 0)
5727 regno
= reg_renumber
[regno
];
5729 /* We require a base register here... */
5730 if (!regno_ok_for_base_p (regno
, GET_MODE (x
), code
, index_code
))
5732 reloadnum
= push_reload (XEXP (op1
, 0), XEXP (x
, 0),
5733 &XEXP (op1
, 0), &XEXP (x
, 0),
5734 base_reg_class (mode
, code
, index_code
),
5735 GET_MODE (x
), GET_MODE (x
), 0, 0,
5736 opnum
, RELOAD_OTHER
);
5738 update_auto_inc_notes (this_insn
, regno
, reloadnum
);
5748 if (REG_P (XEXP (x
, 0)))
5750 int regno
= REGNO (XEXP (x
, 0));
5754 /* A register that is incremented cannot be constant! */
5755 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
5756 || reg_equiv_constant
[regno
] == 0);
5758 /* Handle a register that is equivalent to a memory location
5759 which cannot be addressed directly. */
5760 if (reg_equiv_memory_loc
[regno
] != 0
5761 && (reg_equiv_address
[regno
] != 0 || num_not_at_initial_offset
))
5763 rtx tem
= make_memloc (XEXP (x
, 0), regno
);
5764 if (reg_equiv_address
[regno
]
5765 || ! rtx_equal_p (tem
, reg_equiv_mem
[regno
]))
5769 /* First reload the memory location's address.
5770 We can't use ADDR_TYPE (type) here, because we need to
5771 write back the value after reading it, hence we actually
5772 need two registers. */
5773 find_reloads_address (GET_MODE (tem
), &tem
, XEXP (tem
, 0),
5774 &XEXP (tem
, 0), opnum
, type
,
5776 if (!rtx_equal_p (tem
, orig
))
5777 push_reg_equiv_alt_mem (regno
, tem
);
5778 /* Put this inside a new increment-expression. */
5779 x
= gen_rtx_fmt_e (GET_CODE (x
), GET_MODE (x
), tem
);
5780 /* Proceed to reload that, as if it contained a register. */
5784 /* If we have a hard register that is ok in this incdec context,
5785 don't make a reload. If the register isn't nice enough for
5786 autoincdec, we can reload it. But, if an autoincrement of a
5787 register that we here verified as playing nice, still outside
5788 isn't "valid", it must be that no autoincrement is "valid".
5789 If that is true and something made an autoincrement anyway,
5790 this must be a special context where one is allowed.
5791 (For example, a "push" instruction.)
5792 We can't improve this address, so leave it alone. */
5794 /* Otherwise, reload the autoincrement into a suitable hard reg
5795 and record how much to increment by. */
5797 if (reg_renumber
[regno
] >= 0)
5798 regno
= reg_renumber
[regno
];
5799 if (regno
>= FIRST_PSEUDO_REGISTER
5800 || !REG_OK_FOR_CONTEXT (context
, regno
, mode
, code
,
5805 /* If we can output the register afterwards, do so, this
5806 saves the extra update.
5807 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5808 CALL_INSN - and it does not set CC0.
5809 But don't do this if we cannot directly address the
5810 memory location, since this will make it harder to
5811 reuse address reloads, and increases register pressure.
5812 Also don't do this if we can probably update x directly. */
5813 rtx equiv
= (MEM_P (XEXP (x
, 0))
5815 : reg_equiv_mem
[regno
]);
5816 int icode
= (int) optab_handler (add_optab
, GET_MODE (x
));
5817 if (insn
&& NONJUMP_INSN_P (insn
) && equiv
5818 && memory_operand (equiv
, GET_MODE (equiv
))
5820 && ! sets_cc0_p (PATTERN (insn
))
5822 && ! (icode
!= CODE_FOR_nothing
5823 && ((*insn_data
[icode
].operand
[0].predicate
)
5824 (equiv
, GET_MODE (x
)))
5825 && ((*insn_data
[icode
].operand
[1].predicate
)
5826 (equiv
, GET_MODE (x
)))))
5828 /* We use the original pseudo for loc, so that
5829 emit_reload_insns() knows which pseudo this
5830 reload refers to and updates the pseudo rtx, not
5831 its equivalent memory location, as well as the
5832 corresponding entry in reg_last_reload_reg. */
5833 loc
= &XEXP (x_orig
, 0);
5836 = push_reload (x
, x
, loc
, loc
,
5838 GET_MODE (x
), GET_MODE (x
), 0, 0,
5839 opnum
, RELOAD_OTHER
);
5844 = push_reload (x
, x
, loc
, (rtx
*) 0,
5846 GET_MODE (x
), GET_MODE (x
), 0, 0,
5849 = find_inc_amount (PATTERN (this_insn
), XEXP (x_orig
, 0));
5854 update_auto_inc_notes (this_insn
, REGNO (XEXP (x_orig
, 0)),
5864 /* Look for parts to reload in the inner expression and reload them
5865 too, in addition to this operation. Reloading all inner parts in
5866 addition to this one shouldn't be necessary, but at this point,
5867 we don't know if we can possibly omit any part that *can* be
5868 reloaded. Targets that are better off reloading just either part
5869 (or perhaps even a different part of an outer expression), should
5870 define LEGITIMIZE_RELOAD_ADDRESS. */
5871 find_reloads_address_1 (GET_MODE (XEXP (x
, 0)), XEXP (x
, 0),
5872 context
, code
, SCRATCH
, &XEXP (x
, 0), opnum
,
5873 type
, ind_levels
, insn
);
5874 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5876 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5880 /* This is probably the result of a substitution, by eliminate_regs, of
5881 an equivalent address for a pseudo that was not allocated to a hard
5882 register. Verify that the specified address is valid and reload it
5885 Since we know we are going to reload this item, don't decrement for
5886 the indirection level.
5888 Note that this is actually conservative: it would be slightly more
5889 efficient to use the value of SPILL_INDIRECT_LEVELS from
5892 find_reloads_address (GET_MODE (x
), loc
, XEXP (x
, 0), &XEXP (x
, 0),
5893 opnum
, ADDR_TYPE (type
), ind_levels
, insn
);
5894 push_reload (*loc
, NULL_RTX
, loc
, (rtx
*) 0,
5896 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5901 int regno
= REGNO (x
);
5903 if (reg_equiv_constant
[regno
] != 0)
5905 find_reloads_address_part (reg_equiv_constant
[regno
], loc
,
5907 GET_MODE (x
), opnum
, type
, ind_levels
);
5911 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5912 that feeds this insn. */
5913 if (reg_equiv_mem
[regno
] != 0)
5915 push_reload (reg_equiv_mem
[regno
], NULL_RTX
, loc
, (rtx
*) 0,
5917 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5922 if (reg_equiv_memory_loc
[regno
]
5923 && (reg_equiv_address
[regno
] != 0 || num_not_at_initial_offset
))
5925 rtx tem
= make_memloc (x
, regno
);
5926 if (reg_equiv_address
[regno
] != 0
5927 || ! rtx_equal_p (tem
, reg_equiv_mem
[regno
]))
5930 find_reloads_address (GET_MODE (x
), &x
, XEXP (x
, 0),
5931 &XEXP (x
, 0), opnum
, ADDR_TYPE (type
),
5933 if (!rtx_equal_p (x
, tem
))
5934 push_reg_equiv_alt_mem (regno
, x
);
5938 if (reg_renumber
[regno
] >= 0)
5939 regno
= reg_renumber
[regno
];
5941 if (regno
>= FIRST_PSEUDO_REGISTER
5942 || !REG_OK_FOR_CONTEXT (context
, regno
, mode
, outer_code
,
5945 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5947 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5951 /* If a register appearing in an address is the subject of a CLOBBER
5952 in this insn, reload it into some other register to be safe.
5953 The CLOBBER is supposed to make the register unavailable
5954 from before this insn to after it. */
5955 if (regno_clobbered_p (regno
, this_insn
, GET_MODE (x
), 0))
5957 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5959 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5966 if (REG_P (SUBREG_REG (x
)))
5968 /* If this is a SUBREG of a hard register and the resulting register
5969 is of the wrong class, reload the whole SUBREG. This avoids
5970 needless copies if SUBREG_REG is multi-word. */
5971 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
5973 int regno ATTRIBUTE_UNUSED
= subreg_regno (x
);
5975 if (!REG_OK_FOR_CONTEXT (context
, regno
, mode
, outer_code
,
5978 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0,
5980 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
5984 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5985 is larger than the class size, then reload the whole SUBREG. */
5988 enum reg_class rclass
= context_reg_class
;
5989 if ((unsigned) CLASS_MAX_NREGS (rclass
, GET_MODE (SUBREG_REG (x
)))
5990 > reg_class_size
[rclass
])
5992 x
= find_reloads_subreg_address (x
, 0, opnum
,
5995 push_reload (x
, NULL_RTX
, loc
, (rtx
*) 0, rclass
,
5996 GET_MODE (x
), VOIDmode
, 0, 0, opnum
, type
);
  const char *fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
	 we get here.  */
      find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
			      &XEXP (x, i), opnum, type, ind_levels, insn);

#undef REG_OK_FOR_CONTEXT
/* X, which is found at *LOC, is a part of an address that needs to be
   reloaded into a register of class RCLASS.  If X is a constant, or if
   X is a PLUS that contains a constant, check that the constant is a
   legitimate operand and that we are supposed to be able to load
   it into the register.

   If not, force the constant into memory and reload the MEM instead.

   MODE is the mode to use, in case X is an integer constant.

   OPNUM and TYPE describe the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.  */

static void
find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
			   enum machine_mode mode, int opnum,
			   enum reload_type type, int ind_levels)
{
  if (CONSTANT_P (x)
      && (! LEGITIMATE_CONSTANT_P (x)
	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
    {
      x = force_const_mem (mode, x);
      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
			    opnum, type, ind_levels, 0);
    }

  else if (GET_CODE (x) == PLUS
	   && CONSTANT_P (XEXP (x, 1))
	   && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
		  == NO_REGS))
    {
      rtx tem;

      tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
      x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
      find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
			    opnum, type, ind_levels, 0);
    }

  push_reload (x, NULL_RTX, loc, (rtx *) 0, rclass,
	       mode, VOIDmode, 0, 0, opnum, type);
}
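
/* An illustrative sketch (the symbol is hypothetical): on a target whose
   LEGITIMATE_CONSTANT_P rejects a symbolic constant such as
   (symbol_ref "var"), the code above forces it into the constant pool with
   force_const_mem, runs find_reloads_address on the new MEM's own address,
   and then push_reload copies that MEM, rather than the original constant,
   into a register of class RCLASS.  */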
/* X, a subreg of a pseudo, is a part of an address that needs to be
   reloaded.

   If the pseudo is equivalent to a memory location that cannot be directly
   addressed, make the necessary address reloads.

   If address reloads have been necessary, or if the address is changed
   by register elimination, return the rtx of the memory location;
   otherwise, return X.

   If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
   memory location.

   OPNUM and TYPE identify the purpose of the reload.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine where to put USEs for pseudos that we have to replace with
   stack slots.  */

static rtx
find_reloads_subreg_address (rtx x, int force_replace, int opnum,
			     enum reload_type type, int ind_levels, rtx insn)
6098 int regno
= REGNO (SUBREG_REG (x
));
6100 if (reg_equiv_memory_loc
[regno
])
6102 /* If the address is not directly addressable, or if the address is not
6103 offsettable, then it must be replaced. */
6105 && (reg_equiv_address
[regno
]
6106 || ! offsettable_memref_p (reg_equiv_mem
[regno
])))
6109 if (force_replace
|| num_not_at_initial_offset
)
6111 rtx tem
= make_memloc (SUBREG_REG (x
), regno
);
6113 /* If the address changes because of register elimination, then
6114 it must be replaced. */
6116 || ! rtx_equal_p (tem
, reg_equiv_mem
[regno
]))
6118 unsigned outer_size
= GET_MODE_SIZE (GET_MODE (x
));
6119 unsigned inner_size
= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)));
6124 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6125 hold the correct (negative) byte offset. */
6126 if (BYTES_BIG_ENDIAN
&& outer_size
> inner_size
)
6127 offset
= inner_size
- outer_size
;
6129 offset
= SUBREG_BYTE (x
);
6131 XEXP (tem
, 0) = plus_constant (XEXP (tem
, 0), offset
);
6132 PUT_MODE (tem
, GET_MODE (x
));
6133 if (MEM_OFFSET (tem
))
6134 set_mem_offset (tem
, plus_constant (MEM_OFFSET (tem
), offset
));
6136 && INTVAL (MEM_SIZE (tem
)) != (HOST_WIDE_INT
) outer_size
)
6137 set_mem_size (tem
, GEN_INT (outer_size
));
6139 /* If this was a paradoxical subreg that we replaced, the
6140 resulting memory must be sufficiently aligned to allow
6141 us to widen the mode of the memory. */
6142 if (outer_size
> inner_size
)
6146 base
= XEXP (tem
, 0);
6147 if (GET_CODE (base
) == PLUS
)
6149 if (CONST_INT_P (XEXP (base
, 1))
6150 && INTVAL (XEXP (base
, 1)) % outer_size
!= 0)
6152 base
= XEXP (base
, 0);
6155 || (REGNO_POINTER_ALIGN (REGNO (base
))
6156 < outer_size
* BITS_PER_UNIT
))
6160 reloaded
= find_reloads_address (GET_MODE (tem
), &tem
,
6161 XEXP (tem
, 0), &XEXP (tem
, 0),
6162 opnum
, type
, ind_levels
, insn
);
6163 /* ??? Do we need to handle nonzero offsets somehow? */
6164 if (!offset
&& !rtx_equal_p (tem
, orig
))
6165 push_reg_equiv_alt_mem (regno
, tem
);
6167 /* For some processors an address may be valid in the
6168 original mode but not in a smaller mode. For
6169 example, ARM accepts a scaled index register in
6170 SImode but not in HImode. Note that this is only
6171 a problem if the address in reg_equiv_mem is already
6172 invalid in the new mode; other cases would be fixed
6173 by find_reloads_address as usual.
6175 ??? We attempt to handle such cases here by doing an
6176 additional reload of the full address after the
6177 usual processing by find_reloads_address. Note that
6178 this may not work in the general case, but it seems
6179 to cover the cases where this situation currently
6180 occurs. A more general fix might be to reload the
6181 *value* instead of the address, but this would not
6182 be expected by the callers of this routine as-is.
6184 If find_reloads_address already completed replaced
6185 the address, there is nothing further to do. */
6187 && reg_equiv_mem
[regno
] != 0
6188 && !strict_memory_address_addr_space_p
6189 (GET_MODE (x
), XEXP (reg_equiv_mem
[regno
], 0),
6190 MEM_ADDR_SPACE (reg_equiv_mem
[regno
])))
6191 push_reload (XEXP (tem
, 0), NULL_RTX
, &XEXP (tem
, 0), (rtx
*) 0,
6192 base_reg_class (GET_MODE (tem
), MEM
, SCRATCH
),
6193 GET_MODE (XEXP (tem
, 0)), VOIDmode
, 0, 0,
6196 /* If this is not a toplevel operand, find_reloads doesn't see
6197 this substitution. We have to emit a USE of the pseudo so
6198 that delete_output_reload can see it. */
6199 if (replace_reloads
&& recog_data
.operand
[opnum
] != x
)
6200 /* We mark the USE with QImode so that we recognize it
6201 as one that can be safely deleted at the end of
6203 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode
,
/* Substitute into the current INSN the registers into which we have reloaded
   the things that need reloading.  The array `replacements'
   contains the locations of all pointers that must be changed
   and says what to replace them with.

   Return the rtx that X translates into; usually X, but modified.  */

void
subst_reloads (rtx insn)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;
      if (reloadreg)
	{
#ifdef DEBUG_RELOAD
	  /* This checking takes a very long time on some platforms
	     causing the gcc.c-torture/compile/limits-fnargs.c test
	     to time out during testing.  See PR 31850.

	     Internal consistency test.  Check that we don't modify
	     anything in the equivalence arrays.  Whenever something from
	     those arrays needs to be reloaded, it must be unshared before
	     being substituted into; the equivalence must not be modified.
	     Otherwise, if the equivalence is used after that, it will
	     have been modified, and the thing substituted (probably a
	     register) is likely overwritten and not a usable equivalence.  */
	  int check_regno;

	  for (check_regno = 0; check_regno < max_regno; check_regno++)
	    {
#define CHECK_MODF(ARRAY) \
  gcc_assert (!ARRAY[check_regno] \
	      || !loc_mentioned_in_p (r->where, \
				      ARRAY[check_regno]))

	      CHECK_MODF (reg_equiv_constant);
	      CHECK_MODF (reg_equiv_memory_loc);
	      CHECK_MODF (reg_equiv_address);
	      CHECK_MODF (reg_equiv_mem);
	    }
#endif /* DEBUG_RELOAD */

	  /* If we're replacing a LABEL_REF with a register, there must
	     already be an indication (to e.g. flow) which label this
	     register refers to.  */
	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
		      || find_reg_note (insn,
					XEXP (*r->where, 0))
		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));

	  /* Encapsulate RELOADREG so its machine mode matches what
	     used to be there.  Note that gen_lowpart_common will
	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
	     will always be a REG here.  */

	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  /* If we are putting this into a SUBREG and RELOADREG is a
	     SUBREG, we would be making nested SUBREGs, so we have to fix
	     this up.  Note that r->where == &SUBREG_REG (*r->subreg_loc).  */

	  if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
	    {
	      if (GET_MODE (*r->subreg_loc)
		  == GET_MODE (SUBREG_REG (reloadreg)))
		*r->subreg_loc = SUBREG_REG (reloadreg);
	      else
		{
		  int final_offset =
		    SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);

		  /* When working with SUBREGs the rule is that the byte
		     offset must be a multiple of the SUBREG's mode.  */
		  final_offset = (final_offset /
				  GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
		  final_offset = (final_offset *
				  GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));

		  *r->where = SUBREG_REG (reloadreg);
		  SUBREG_BYTE (*r->subreg_loc) = final_offset;
		}
	    }
	  else
	    *r->where = reloadreg;
	}
      /* If reload got no reg and isn't optional, something's wrong.  */
      else
	gcc_assert (rld[r->what].optional);
    }
}
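
/* A worked example of the SUBREG_BYTE arithmetic in subst_reloads above
   (mode sizes assumed for illustration): if *r->subreg_loc has HImode
   (size 2) and the two SUBREG_BYTE values sum to 3, the divide-then-multiply
   pair truncates 3 down to 2, so the rebuilt offset is again a multiple of
   the outer SUBREG's mode size, as the comment there requires.  */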
/* Make a copy of any replacements being done into X and move those
   copies to locations in Y, a copy of X.  */

copy_replacements (rtx x, rtx y)
{
  /* We can't support X being a SUBREG because we might then need to know its
     location if something inside it was replaced.  */
  gcc_assert (GET_CODE (x) != SUBREG);

  copy_replacements_1 (&x, &y, n_replacements);
}

copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
{
  struct replacement *r;

  for (j = 0; j < orig_replacements; j++)
    {
      if (replacements[j].subreg_loc == px)
	{
	  r = &replacements[n_replacements++];
	  r->where = replacements[j].where;
	  r->what = replacements[j].what;
	  r->mode = replacements[j].mode;
	}
      else if (replacements[j].where == px)
	{
	  r = &replacements[n_replacements++];
	  r->what = replacements[j].what;
	  r->mode = replacements[j].mode;
	}
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i); --j >= 0; )
	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
			       orig_replacements);
    }
}

/* Change any replacements being done to *X to be done to *Y.  */

move_replacements (rtx *x, rtx *y)
{
  for (i = 0; i < n_replacements; i++)
    if (replacements[i].subreg_loc == x)
      replacements[i].subreg_loc = y;
    else if (replacements[i].where == x)
      {
	replacements[i].where = y;
	replacements[i].subreg_loc = 0;
      }
}
/* If LOC was scheduled to be replaced by something, return the replacement.
   Otherwise, return *LOC.  */

find_replacement (rtx *loc)
{
  struct replacement *r;

  for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
    {
      rtx reloadreg = rld[r->what].reg_rtx;

      if (reloadreg && r->where == loc)
	{
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));

	  return reloadreg;
	}
      else if (reloadreg && r->subreg_loc == loc)
	{
	  /* RELOADREG must be either a REG or a SUBREG.

	     ??? Is it actually still ever a SUBREG?  If so, why?  */

	  if (REG_P (reloadreg))
	    return gen_rtx_REG (GET_MODE (*loc),
				(REGNO (reloadreg) +
				 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
						      GET_MODE (SUBREG_REG (*loc)),
						      SUBREG_BYTE (*loc),
						      GET_MODE (*loc))));
	  else if (GET_MODE (reloadreg) == GET_MODE (*loc))
	    return reloadreg;
	  else
	    {
	      int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);

	      /* When working with SUBREGs the rule is that the byte
		 offset must be a multiple of the SUBREG's mode.  */
	      final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
	      final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
	      return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
				     final_offset);
	    }
	}
    }

  /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
     what's inside and make a new rtl if so.  */
  if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
      || GET_CODE (*loc) == MULT)
    {
      rtx x = find_replacement (&XEXP (*loc, 0));
      rtx y = find_replacement (&XEXP (*loc, 1));

      if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
    }

  return *loc;
}
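
/* An illustrative example (register numbers are hypothetical): if *LOC is
   (plus:SI (reg 70) (const_int 8)) and a replacement maps &XEXP (*loc, 0)
   to a reload whose reg_rtx is (reg:SI 3), the recursive call above yields
   (reg:SI 3) for the first operand and a fresh
   (plus:SI (reg:SI 3) (const_int 8)) is returned; *LOC itself is left
   untouched here -- the recorded replacements are applied by subst_reloads.  */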
6449 /* Return nonzero if register in range [REGNO, ENDREGNO)
6450 appears either explicitly or implicitly in X
6451 other than being stored into (except for earlyclobber operands).
6453 References contained within the substructure at LOC do not count.
6454 LOC may be zero, meaning don't ignore anything.
6456 This is similar to refers_to_regno_p in rtlanal.c except that we
6457 look at equivalences for pseudos that didn't get hard registers. */
6460 refers_to_regno_for_reload_p (unsigned int regno
, unsigned int endregno
,
6472 code
= GET_CODE (x
);
6479 /* If this is a pseudo, a hard register must not have been allocated.
6480 X must therefore either be a constant or be in memory. */
6481 if (r
>= FIRST_PSEUDO_REGISTER
)
6483 if (reg_equiv_memory_loc
[r
])
6484 return refers_to_regno_for_reload_p (regno
, endregno
,
6485 reg_equiv_memory_loc
[r
],
6488 gcc_assert (reg_equiv_constant
[r
] || reg_equiv_invariant
[r
]);
6492 return (endregno
> r
6493 && regno
< r
+ (r
< FIRST_PSEUDO_REGISTER
6494 ? hard_regno_nregs
[r
][GET_MODE (x
)]
6498 /* If this is a SUBREG of a hard reg, we can see exactly which
6499 registers are being modified. Otherwise, handle normally. */
6500 if (REG_P (SUBREG_REG (x
))
6501 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
6503 unsigned int inner_regno
= subreg_regno (x
);
6504 unsigned int inner_endregno
6505 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
6506 ? subreg_nregs (x
) : 1);
6508 return endregno
> inner_regno
&& regno
< inner_endregno
;
6514 if (&SET_DEST (x
) != loc
6515 /* Note setting a SUBREG counts as referring to the REG it is in for
6516 a pseudo but not for hard registers since we can
6517 treat each word individually. */
6518 && ((GET_CODE (SET_DEST (x
)) == SUBREG
6519 && loc
!= &SUBREG_REG (SET_DEST (x
))
6520 && REG_P (SUBREG_REG (SET_DEST (x
)))
6521 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
6522 && refers_to_regno_for_reload_p (regno
, endregno
,
6523 SUBREG_REG (SET_DEST (x
)),
6525 /* If the output is an earlyclobber operand, this is
6527 || ((!REG_P (SET_DEST (x
))
6528 || earlyclobber_operand_p (SET_DEST (x
)))
6529 && refers_to_regno_for_reload_p (regno
, endregno
,
6530 SET_DEST (x
), loc
))))
6533 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
6542 /* X does not match, so try its subexpressions. */
6544 fmt
= GET_RTX_FORMAT (code
);
6545 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6547 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
6555 if (refers_to_regno_for_reload_p (regno
, endregno
,
6559 else if (fmt
[i
] == 'E')
6562 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
6563 if (loc
!= &XVECEXP (x
, i
, j
)
6564 && refers_to_regno_for_reload_p (regno
, endregno
,
6565 XVECEXP (x
, i
, j
), loc
))
6572 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6573 we check if any register number in X conflicts with the relevant register
6574 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6575 contains a MEM (we don't bother checking for memory addresses that can't
6576 conflict because we expect this to be a rare case.
6578 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6579 that we look at equivalences for pseudos that didn't get hard registers. */
6582 reg_overlap_mentioned_for_reload_p (rtx x
, rtx in
)
6584 int regno
, endregno
;
6586 /* Overly conservative. */
6587 if (GET_CODE (x
) == STRICT_LOW_PART
6588 || GET_RTX_CLASS (GET_CODE (x
)) == RTX_AUTOINC
)
6591 /* If either argument is a constant, then modifying X can not affect IN. */
6592 if (CONSTANT_P (x
) || CONSTANT_P (in
))
6594 else if (GET_CODE (x
) == SUBREG
&& MEM_P (SUBREG_REG (x
)))
6595 return refers_to_mem_for_reload_p (in
);
6596 else if (GET_CODE (x
) == SUBREG
)
6598 regno
= REGNO (SUBREG_REG (x
));
6599 if (regno
< FIRST_PSEUDO_REGISTER
)
6600 regno
+= subreg_regno_offset (REGNO (SUBREG_REG (x
)),
6601 GET_MODE (SUBREG_REG (x
)),
6604 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
6605 ? subreg_nregs (x
) : 1);
6607 return refers_to_regno_for_reload_p (regno
, endregno
, in
, (rtx
*) 0);
6613 /* If this is a pseudo, it must not have been assigned a hard register.
6614 Therefore, it must either be in memory or be a constant. */
6616 if (regno
>= FIRST_PSEUDO_REGISTER
)
6618 if (reg_equiv_memory_loc
[regno
])
6619 return refers_to_mem_for_reload_p (in
);
6620 gcc_assert (reg_equiv_constant
[regno
]);
6624 endregno
= END_HARD_REGNO (x
);
6626 return refers_to_regno_for_reload_p (regno
, endregno
, in
, (rtx
*) 0);
6629 return refers_to_mem_for_reload_p (in
);
6630 else if (GET_CODE (x
) == SCRATCH
|| GET_CODE (x
) == PC
6631 || GET_CODE (x
) == CC0
)
6632 return reg_mentioned_p (x
, in
);
6635 gcc_assert (GET_CODE (x
) == PLUS
);
6637 /* We actually want to know if X is mentioned somewhere inside IN.
6638 We must not say that (plus (sp) (const_int 124)) is in
6639 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6640 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6641 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6646 else if (GET_CODE (in
) == PLUS
)
6647 return (rtx_equal_p (x
, in
)
6648 || reg_overlap_mentioned_for_reload_p (x
, XEXP (in
, 0))
6649 || reg_overlap_mentioned_for_reload_p (x
, XEXP (in
, 1)));
6650 else return (reg_overlap_mentioned_for_reload_p (XEXP (x
, 0), in
)
6651 || reg_overlap_mentioned_for_reload_p (XEXP (x
, 1), in
));
6657 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6661 refers_to_mem_for_reload_p (rtx x
)
6670 return (REGNO (x
) >= FIRST_PSEUDO_REGISTER
6671 && reg_equiv_memory_loc
[REGNO (x
)]);
6673 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
6674 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
6676 && (MEM_P (XEXP (x
, i
))
6677 || refers_to_mem_for_reload_p (XEXP (x
, i
))))
6683 /* Check the insns before INSN to see if there is a suitable register
6684 containing the same value as GOAL.
6685 If OTHER is -1, look for a register in class RCLASS.
6686 Otherwise, just see if register number OTHER shares GOAL's value.
6688 Return an rtx for the register found, or zero if none is found.
6690 If RELOAD_REG_P is (short *)1,
6691 we reject any hard reg that appears in reload_reg_rtx
6692 because such a hard reg is also needed coming into this insn.
6694 If RELOAD_REG_P is any other nonzero value,
6695 it is a vector indexed by hard reg number
6696 and we reject any hard reg whose element in the vector is nonnegative
6697 as well as any that appears in reload_reg_rtx.
6699 If GOAL is zero, then GOALREG is a register number; we look
6700 for an equivalent for that register.
6702 MODE is the machine mode of the value we want an equivalence for.
6703 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6705 This function is used by jump.c as well as in the reload pass.
6707 If GOAL is the sum of the stack pointer and a constant, we treat it
6708 as if it were a constant except that sp is required to be unchanging. */
6711 find_equiv_reg (rtx goal
, rtx insn
, enum reg_class rclass
, int other
,
6712 short *reload_reg_p
, int goalreg
, enum machine_mode mode
)
6715 rtx goaltry
, valtry
, value
, where
;
6721 int goal_mem_addr_varies
= 0;
6722 int need_stable_sp
= 0;
6729 else if (REG_P (goal
))
6730 regno
= REGNO (goal
);
6731 else if (MEM_P (goal
))
6733 enum rtx_code code
= GET_CODE (XEXP (goal
, 0));
6734 if (MEM_VOLATILE_P (goal
))
6736 if (flag_float_store
&& SCALAR_FLOAT_MODE_P (GET_MODE (goal
)))
6738 /* An address with side effects must be reexecuted. */
6753 else if (CONSTANT_P (goal
))
6755 else if (GET_CODE (goal
) == PLUS
6756 && XEXP (goal
, 0) == stack_pointer_rtx
6757 && CONSTANT_P (XEXP (goal
, 1)))
6758 goal_const
= need_stable_sp
= 1;
6759 else if (GET_CODE (goal
) == PLUS
6760 && XEXP (goal
, 0) == frame_pointer_rtx
6761 && CONSTANT_P (XEXP (goal
, 1)))
6767 /* Scan insns back from INSN, looking for one that copies
6768 a value into or out of GOAL.
6769 Stop and give up if we reach a label. */
6774 if (p
&& DEBUG_INSN_P (p
))
6777 if (p
== 0 || LABEL_P (p
)
6778 || num
> PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS
))
6781 if (NONJUMP_INSN_P (p
)
6782 /* If we don't want spill regs ... */
6783 && (! (reload_reg_p
!= 0
6784 && reload_reg_p
!= (short *) (HOST_WIDE_INT
) 1)
6785 /* ... then ignore insns introduced by reload; they aren't
6786 useful and can cause results in reload_as_needed to be
6787 different from what they were when calculating the need for
6788 spills. If we notice an input-reload insn here, we will
6789 reject it below, but it might hide a usable equivalent.
6790 That makes bad code. It may even fail: perhaps no reg was
6791 spilled for this insn because it was assumed we would find
6793 || INSN_UID (p
) < reload_first_uid
))
6796 pat
= single_set (p
);
6798 /* First check for something that sets some reg equal to GOAL. */
6801 && true_regnum (SET_SRC (pat
)) == regno
6802 && (valueno
= true_regnum (valtry
= SET_DEST (pat
))) >= 0)
6805 && true_regnum (SET_DEST (pat
)) == regno
6806 && (valueno
= true_regnum (valtry
= SET_SRC (pat
))) >= 0)
6808 (goal_const
&& rtx_equal_p (SET_SRC (pat
), goal
)
6809 /* When looking for stack pointer + const,
6810 make sure we don't use a stack adjust. */
6811 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat
), goal
)
6812 && (valueno
= true_regnum (valtry
= SET_DEST (pat
))) >= 0)
6814 && (valueno
= true_regnum (valtry
= SET_DEST (pat
))) >= 0
6815 && rtx_renumbered_equal_p (goal
, SET_SRC (pat
)))
6817 && (valueno
= true_regnum (valtry
= SET_SRC (pat
))) >= 0
6818 && rtx_renumbered_equal_p (goal
, SET_DEST (pat
)))
6819 /* If we are looking for a constant,
6820 and something equivalent to that constant was copied
6821 into a reg, we can use that reg. */
6822 || (goal_const
&& REG_NOTES (p
) != 0
6823 && (tem
= find_reg_note (p
, REG_EQUIV
, NULL_RTX
))
6824 && ((rtx_equal_p (XEXP (tem
, 0), goal
)
6826 = true_regnum (valtry
= SET_DEST (pat
))) >= 0)
6827 || (REG_P (SET_DEST (pat
))
6828 && GET_CODE (XEXP (tem
, 0)) == CONST_DOUBLE
6829 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem
, 0)))
6830 && CONST_INT_P (goal
)
6832 = operand_subword (XEXP (tem
, 0), 0, 0,
6834 && rtx_equal_p (goal
, goaltry
)
6836 = operand_subword (SET_DEST (pat
), 0, 0,
6838 && (valueno
= true_regnum (valtry
)) >= 0)))
6839 || (goal_const
&& (tem
= find_reg_note (p
, REG_EQUIV
,
6841 && REG_P (SET_DEST (pat
))
6842 && GET_CODE (XEXP (tem
, 0)) == CONST_DOUBLE
6843 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem
, 0)))
6844 && CONST_INT_P (goal
)
6845 && 0 != (goaltry
= operand_subword (XEXP (tem
, 0), 1, 0,
6847 && rtx_equal_p (goal
, goaltry
)
6849 = operand_subword (SET_DEST (pat
), 1, 0, VOIDmode
))
6850 && (valueno
= true_regnum (valtry
)) >= 0)))
6854 if (valueno
!= other
)
6857 else if ((unsigned) valueno
>= FIRST_PSEUDO_REGISTER
)
6859 else if (!in_hard_reg_set_p (reg_class_contents
[(int) rclass
],
6869 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6870 (or copying VALUE into GOAL, if GOAL is also a register).
6871 Now verify that VALUE is really valid. */
6873 /* VALUENO is the register number of VALUE; a hard register. */
6875 /* Don't try to re-use something that is killed in this insn. We want
6876 to be able to trust REG_UNUSED notes. */
6877 if (REG_NOTES (where
) != 0 && find_reg_note (where
, REG_UNUSED
, value
))
6880 /* If we propose to get the value from the stack pointer or if GOAL is
6881 a MEM based on the stack pointer, we need a stable SP. */
6882 if (valueno
== STACK_POINTER_REGNUM
|| regno
== STACK_POINTER_REGNUM
6883 || (goal_mem
&& reg_overlap_mentioned_for_reload_p (stack_pointer_rtx
,
6887 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6888 if (GET_MODE (value
) != mode
)
6891 /* Reject VALUE if it was loaded from GOAL
6892 and is also a register that appears in the address of GOAL. */
6894 if (goal_mem
&& value
== SET_DEST (single_set (where
))
6895 && refers_to_regno_for_reload_p (valueno
, end_hard_regno (mode
, valueno
),
6899 /* Reject registers that overlap GOAL. */
6901 if (regno
>= 0 && regno
< FIRST_PSEUDO_REGISTER
)
6902 nregs
= hard_regno_nregs
[regno
][mode
];
6905 valuenregs
= hard_regno_nregs
[valueno
][mode
];
6907 if (!goal_mem
&& !goal_const
6908 && regno
+ nregs
> valueno
&& regno
< valueno
+ valuenregs
)
6911 /* Reject VALUE if it is one of the regs reserved for reloads.
6912 Reload1 knows how to reuse them anyway, and it would get
6913 confused if we allocated one without its knowledge.
6914 (Now that insns introduced by reload are ignored above,
6915 this case shouldn't happen, but I'm not positive.) */
6917 if (reload_reg_p
!= 0 && reload_reg_p
!= (short *) (HOST_WIDE_INT
) 1)
6920 for (i
= 0; i
< valuenregs
; ++i
)
6921 if (reload_reg_p
[valueno
+ i
] >= 0)
6925 /* Reject VALUE if it is a register being used for an input reload
6926 even if it is not one of those reserved. */
6928 if (reload_reg_p
!= 0)
6931 for (i
= 0; i
< n_reloads
; i
++)
6932 if (rld
[i
].reg_rtx
!= 0 && rld
[i
].in
)
6934 int regno1
= REGNO (rld
[i
].reg_rtx
);
6935 int nregs1
= hard_regno_nregs
[regno1
]
6936 [GET_MODE (rld
[i
].reg_rtx
)];
6937 if (regno1
< valueno
+ valuenregs
6938 && regno1
+ nregs1
> valueno
)
6944 /* We must treat frame pointer as varying here,
6945 since it can vary--in a nonlocal goto as generated by expand_goto. */
6946 goal_mem_addr_varies
= !CONSTANT_ADDRESS_P (XEXP (goal
, 0));
6948 /* Now verify that the values of GOAL and VALUE remain unaltered
6949 until INSN is reached. */
6958 /* Don't trust the conversion past a function call
6959 if either of the two is in a call-clobbered register, or memory. */
6964 if (goal_mem
|| need_stable_sp
)
6967 if (regno
>= 0 && regno
< FIRST_PSEUDO_REGISTER
)
6968 for (i
= 0; i
< nregs
; ++i
)
6969 if (call_used_regs
[regno
+ i
]
6970 || HARD_REGNO_CALL_PART_CLOBBERED (regno
+ i
, mode
))
6973 if (valueno
>= 0 && valueno
< FIRST_PSEUDO_REGISTER
)
6974 for (i
= 0; i
< valuenregs
; ++i
)
6975 if (call_used_regs
[valueno
+ i
]
6976 || HARD_REGNO_CALL_PART_CLOBBERED (valueno
+ i
, mode
))
6984 /* Watch out for unspec_volatile, and volatile asms. */
6985 if (volatile_insn_p (pat
))
6988 /* If this insn P stores in either GOAL or VALUE, return 0.
6989 If GOAL is a memory ref and this insn writes memory, return 0.
6990 If GOAL is a memory ref and its address is not constant,
6991 and this insn P changes a register used in GOAL, return 0. */
6993 if (GET_CODE (pat
) == COND_EXEC
)
6994 pat
= COND_EXEC_CODE (pat
);
6995 if (GET_CODE (pat
) == SET
|| GET_CODE (pat
) == CLOBBER
)
6997 rtx dest
= SET_DEST (pat
);
6998 while (GET_CODE (dest
) == SUBREG
6999 || GET_CODE (dest
) == ZERO_EXTRACT
7000 || GET_CODE (dest
) == STRICT_LOW_PART
)
7001 dest
= XEXP (dest
, 0);
7004 int xregno
= REGNO (dest
);
7006 if (REGNO (dest
) < FIRST_PSEUDO_REGISTER
)
7007 xnregs
= hard_regno_nregs
[xregno
][GET_MODE (dest
)];
7010 if (xregno
< regno
+ nregs
&& xregno
+ xnregs
> regno
)
7012 if (xregno
< valueno
+ valuenregs
7013 && xregno
+ xnregs
> valueno
)
7015 if (goal_mem_addr_varies
7016 && reg_overlap_mentioned_for_reload_p (dest
, goal
))
7018 if (xregno
== STACK_POINTER_REGNUM
&& need_stable_sp
)
7021 else if (goal_mem
&& MEM_P (dest
)
7022 && ! push_operand (dest
, GET_MODE (dest
)))
7024 else if (MEM_P (dest
) && regno
>= FIRST_PSEUDO_REGISTER
7025 && reg_equiv_memory_loc
[regno
] != 0)
7027 else if (need_stable_sp
&& push_operand (dest
, GET_MODE (dest
)))
7030 else if (GET_CODE (pat
) == PARALLEL
)
7033 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; i
--)
7035 rtx v1
= XVECEXP (pat
, 0, i
);
7036 if (GET_CODE (v1
) == COND_EXEC
)
7037 v1
= COND_EXEC_CODE (v1
);
7038 if (GET_CODE (v1
) == SET
|| GET_CODE (v1
) == CLOBBER
)
7040 rtx dest
= SET_DEST (v1
);
7041 while (GET_CODE (dest
) == SUBREG
7042 || GET_CODE (dest
) == ZERO_EXTRACT
7043 || GET_CODE (dest
) == STRICT_LOW_PART
)
7044 dest
= XEXP (dest
, 0);
7047 int xregno
= REGNO (dest
);
7049 if (REGNO (dest
) < FIRST_PSEUDO_REGISTER
)
7050 xnregs
= hard_regno_nregs
[xregno
][GET_MODE (dest
)];
7053 if (xregno
< regno
+ nregs
7054 && xregno
+ xnregs
> regno
)
7056 if (xregno
< valueno
+ valuenregs
7057 && xregno
+ xnregs
> valueno
)
7059 if (goal_mem_addr_varies
7060 && reg_overlap_mentioned_for_reload_p (dest
,
7063 if (xregno
== STACK_POINTER_REGNUM
&& need_stable_sp
)
7066 else if (goal_mem
&& MEM_P (dest
)
7067 && ! push_operand (dest
, GET_MODE (dest
)))
7069 else if (MEM_P (dest
) && regno
>= FIRST_PSEUDO_REGISTER
7070 && reg_equiv_memory_loc
[regno
] != 0)
7072 else if (need_stable_sp
7073 && push_operand (dest
, GET_MODE (dest
)))
7079 if (CALL_P (p
) && CALL_INSN_FUNCTION_USAGE (p
))
7083 for (link
= CALL_INSN_FUNCTION_USAGE (p
); XEXP (link
, 1) != 0;
7084 link
= XEXP (link
, 1))
7086 pat
= XEXP (link
, 0);
7087 if (GET_CODE (pat
) == CLOBBER
)
7089 rtx dest
= SET_DEST (pat
);
7093 int xregno
= REGNO (dest
);
7095 = hard_regno_nregs
[xregno
][GET_MODE (dest
)];
7097 if (xregno
< regno
+ nregs
7098 && xregno
+ xnregs
> regno
)
7100 else if (xregno
< valueno
+ valuenregs
7101 && xregno
+ xnregs
> valueno
)
7103 else if (goal_mem_addr_varies
7104 && reg_overlap_mentioned_for_reload_p (dest
,
7109 else if (goal_mem
&& MEM_P (dest
)
7110 && ! push_operand (dest
, GET_MODE (dest
)))
7112 else if (need_stable_sp
7113 && push_operand (dest
, GET_MODE (dest
)))
7120 /* If this insn auto-increments or auto-decrements
7121 either regno or valueno, return 0 now.
7122 If GOAL is a memory ref and its address is not constant,
7123 and this insn P increments a register used in GOAL, return 0. */
7127 for (link
= REG_NOTES (p
); link
; link
= XEXP (link
, 1))
7128 if (REG_NOTE_KIND (link
) == REG_INC
7129 && REG_P (XEXP (link
, 0)))
7131 int incno
= REGNO (XEXP (link
, 0));
7132 if (incno
< regno
+ nregs
&& incno
>= regno
)
7134 if (incno
< valueno
+ valuenregs
&& incno
>= valueno
)
7136 if (goal_mem_addr_varies
7137 && reg_overlap_mentioned_for_reload_p (XEXP (link
, 0),
/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

find_inc_amount (rtx x, rtx inced)

  enum rtx_code code = GET_CODE (x);

      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
	   || GET_CODE (addr) == POST_DEC
	   || GET_CODE (addr) == PRE_INC
	   || GET_CODE (addr) == POST_INC)
	  && XEXP (addr, 0) == inced)
	return GET_MODE_SIZE (GET_MODE (x));
      else if ((GET_CODE (addr) == PRE_MODIFY
		|| GET_CODE (addr) == POST_MODIFY)
	       && GET_CODE (XEXP (addr, 1)) == PLUS
	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
	       && XEXP (addr, 0) == inced
	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
	{
	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
	  return i < 0 ? -i : i;
	}

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	int tem = find_inc_amount (XEXP (x, i), inced);

	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  {
	    int tem = find_inc_amount (XVECEXP (x, i, j), inced);
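
/* A worked example (register numbers are illustrative, SImode assumed to be
   4 bytes): for the pattern (set (mem:SI (post_inc:SI (reg 15))) (reg 0)),
   find_inc_amount (pat, (reg 15)) matches the MEM whose address is a
   POST_INC of INCED and returns GET_MODE_SIZE (SImode), i.e. 4.  For a
   PRE_MODIFY or POST_MODIFY by (plus (reg 15) (const_int -8)) it would
   return 8, since the result is always the positive amount.  */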
/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
			   rtx insn)
{
  rtx link;

  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
	unsigned int test = (int) REGNO (XEXP (link, 0));
	if (test >= regno && test < endregno)
	  return 1;
      }
  return 0;
}

#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
		   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  if ((GET_CODE (elt) == CLOBBER
	       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
	      && REG_P (XEXP (elt, 0)))
	    {
	      unsigned int test = REGNO (XEXP (elt, 0));

	      if (test >= regno && test < endregno)
		return 1;
	    }
	  if (sets == 2
	      && reg_inc_found_and_valid_p (regno, endregno, elt))
	    return 1;
    }
  return 0;
}
/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
rtx
reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
{
  int regno;

  if (GET_MODE (reloadreg) == mode)
    return reloadreg;

  regno = REGNO (reloadreg);

  if (WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
      - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
}
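
/* A worked example (hard register numbers and a 32-bit word size are
   assumed for illustration): if RELOADREG is (reg:DI 8), occupying hard
   registers 8 and 9, and MODE is SImode, a !WORDS_BIG_ENDIAN target gets
   (reg:SI 8), while a WORDS_BIG_ENDIAN target adds the hard_regno_nregs
   difference 2 - 1 and gets (reg:SI 9), the register that holds the
   low-order word in that layout.  */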
static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_FOR_OTHER_ADDRESS"
};

/* These functions are used to print the variables set by 'find_reloads' */

debug_reload_to_stream (FILE *f)
{
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

	  fprintf (f, "reload_in (%s) = ",
		   GET_MODE_NAME (rld[r].inmode));
	  print_inline_rtx (f, rld[r].in, 24);
	  fprintf (f, "\n\t");

      if (rld[r].out != 0)
	{
	  fprintf (f, "reload_out (%s) = ",
		   GET_MODE_NAME (rld[r].outmode));
	  print_inline_rtx (f, rld[r].out, 24);
	  fprintf (f, "\n\t");
	}

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
	       reload_when_needed_name[(int) rld[r].when_needed],

      if (rld[r].optional)
	fprintf (f, ", optional");

      if (rld[r].nongroup)
	fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
	fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
	fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
	fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
	{
	  fprintf (f, "\n\treload_in_reg: ");
	  print_inline_rtx (f, rld[r].in_reg, 24);
	}

      if (rld[r].out_reg != 0)
	{
	  fprintf (f, "\n\treload_out_reg: ");
	  print_inline_rtx (f, rld[r].out_reg, 24);
	}

      if (rld[r].reg_rtx != 0)
	{
	  fprintf (f, "\n\treload_reg_rtx: ");
	  print_inline_rtx (f, rld[r].reg_rtx, 24);
	}

      if (rld[r].secondary_in_reload != -1)
	fprintf (f, "%ssecondary_in_reload = %d",
		 prefix, rld[r].secondary_in_reload);

      if (rld[r].secondary_out_reload != -1)
	fprintf (f, "%ssecondary_out_reload = %d\n",
		 prefix, rld[r].secondary_out_reload);

      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
	fprintf (f, "%ssecondary_in_icode = %s", prefix,
		 insn_data[rld[r].secondary_in_icode].name);

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
	fprintf (f, "%ssecondary_out_icode = %s", prefix,
		 insn_data[rld[r].secondary_out_icode].name);
    }
}

  debug_reload_to_stream (stderr);