1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
58 NOTE SIDE EFFECTS:
60 find_reloads can alter the operands of the instruction it is called on.
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
74 Using a reload register for several reloads in one insn:
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
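/* An illustrative, non-compiled sketch of the calling sequence described
   above, loosely modeled on the reload pass driver in reload1.c.  The
   function and argument names used here (reload_one_insn_sketch,
   IND_LEVELS, LIVE_KNOWN, RELOAD_REG_P) and the way a register is chosen
   in step 2 are placeholders for this sketch, not definitions taken from
   this file.  */
#if 0
static void
reload_one_insn_sketch (rtx insn, int ind_levels, int live_known,
                        short *reload_reg_p)
{
  int r;

  /* Step 1: record the reloads needed by INSN in n_reloads / rld[].
     The nonzero second argument also records where the reloaded values
     appear, so that they can be substituted later.  */
  find_reloads (insn, 1, ind_levels, live_known, reload_reg_p);

  /* Step 2: give each reload a hard register.  Some reloads come back
     from find_reloads with rld[r].reg_rtx (the `reload_reg_rtx' values
     mentioned above) already set; those registers must still be loaded
     and stored around INSN.  */
  for (r = 0; r < n_reloads; r++)
    if (rld[r].reg_rtx == 0)
      ;  /* Choose a hard register in rld[r].rclass here.  */

  /* Step 3: substitute the chosen reload registers into the locations
     recorded in step 1.  Emitting the load insns before INSN and the
     store insns after it is a separate job (emit_reload_insns).  */
  subst_reloads (insn);
}
#endif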
88 #define REG_OK_STRICT
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
115 /* True if X is a constant that can be forced into the constant pool.
116 MODE is the mode of the operand, or VOIDmode if not known. */
117 #define CONST_POOL_OK_P(MODE, X) \
118 ((MODE) != VOIDmode \
119 && CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (MODE, X))
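/* A minimal usage sketch of the macro above; illustrative only and not
   compiled.  A constant that may not stay as an immediate operand can be
   spilled to the constant pool only when CONST_POOL_OK_P allows it.
   MODE and OP stand for an operand's mode and rtx and are placeholders
   of this sketch.  */
#if 0
  if (CONST_POOL_OK_P (mode, op))
    op = force_const_mem (mode, op);
#endif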
123 /* True if RCLASS is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
126 static inline bool
127 small_register_class_p (reg_class_t rclass)
129 return (reg_class_size [(int) rclass] == 1
130 || (reg_class_size [(int) rclass] >= 1
131 && targetm.class_likely_spilled_p (rclass)));
135 /* All reloads of the current insn are recorded here. See reload.h for
136 comments. */
137 int n_reloads;
138 struct reload rld[MAX_RELOADS];
140 /* All the "earlyclobber" operands of the current insn
141 are recorded here. */
142 int n_earlyclobbers;
143 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
145 int reload_n_operands;
147 /* Replacing reloads.
149 If `replace_reloads' is nonzero, then as each reload is recorded
150 an entry is made for it in the table `replacements'.
151 Then later `subst_reloads' can look through that table and
152 perform all the replacements needed. */
154 /* Nonzero means record the places to replace. */
155 static int replace_reloads;
157 /* Each replacement is recorded with a structure like this. */
158 struct replacement
160 rtx *where; /* Location to store in. */
161 int what; /* Which reload this is for. */
162 enum machine_mode mode; /* Mode it must have. */
165 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
167 /* Number of replacements currently recorded. */
168 static int n_replacements;
170 /* Used to track what is modified by an operand. */
171 struct decomposition
173 int reg_flag; /* Nonzero if referencing a register. */
174 int safe; /* Nonzero if this can't conflict with anything. */
175 rtx base; /* Base address for MEM. */
176 HOST_WIDE_INT start; /* Starting offset or register number. */
177 HOST_WIDE_INT end; /* Ending offset or register number. */
180 #ifdef SECONDARY_MEMORY_NEEDED
182 /* Save MEMs needed to copy from one class of registers to another. One MEM
183 is used per mode, but normally only one or two modes are ever used.
185 We keep two versions, before and after register elimination. The one
186 after register elimination is recorded separately for each operand. This
187 is done in case the address is not valid, to be sure that we reload
188 each one separately. */
190 static rtx secondary_memlocs[NUM_MACHINE_MODES];
191 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
192 static int secondary_memlocs_elim_used = 0;
193 #endif
195 /* The instruction we are doing reloads for;
196 so we can test whether a register dies in it. */
197 static rtx this_insn;
199 /* Nonzero if this instruction is a user-specified asm with operands. */
200 static int this_insn_is_asm;
202 /* If hard_regs_live_known is nonzero,
203 we can tell which hard regs are currently live,
204 at least enough to succeed in choosing dummy reloads. */
205 static int hard_regs_live_known;
207 /* Indexed by hard reg number,
208 element is nonnegative if hard reg has been spilled.
209 This vector is passed to `find_reloads' as an argument
210 and is not changed here. */
211 static short *static_reload_reg_p;
213 /* Set to 1 in subst_reg_equivs if it changes anything. */
214 static int subst_reg_equivs_changed;
216 /* On return from push_reload, holds the reload-number for the OUT
217 operand, which can be different from the reload-number for the IN operand. */
218 static int output_reloadnum;
220 /* Compare two RTX's. */
221 #define MATCHES(x, y) \
222 (x == y || (x != 0 && (REG_P (x) \
223 ? REG_P (y) && REGNO (x) == REGNO (y) \
224 : rtx_equal_p (x, y) && ! side_effects_p (x))))
226 /* Nonzero if the purposes of two reloads are similar enough that we
227 can merge the reloads. */
228 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
229 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
230 || ((when1) == (when2) && (op1) == (op2)) \
231 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
232 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
233 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
234 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
235 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
237 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
238 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
239 ((when1) != (when2) \
240 || ! ((op1) == (op2) \
241 || (when1) == RELOAD_FOR_INPUT \
242 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
243 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
245 /* If we are going to reload an address, compute the reload type to
246 use. */
247 #define ADDR_TYPE(type) \
248 ((type) == RELOAD_FOR_INPUT_ADDRESS \
249 ? RELOAD_FOR_INPADDR_ADDRESS \
250 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
251 ? RELOAD_FOR_OUTADDR_ADDRESS \
252 : (type)))
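/* A small worked example of the three macros above; illustrative only
   and not compiled.  */
#if 0
  /* Both are RELOAD_FOR_INPUT, so they may be merged even though they
     belong to different operands ...  */
  MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1);   /* nonzero */
  /* ... and the merged reload keeps RELOAD_FOR_INPUT as its type.  */
  MERGE_TO_OTHER (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1);     /* zero */

  /* Input address reloads for different operands are not similar enough
     to be merged at all.  */
  MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS,
                    RELOAD_FOR_INPUT_ADDRESS, 0, 1);             /* zero */

  /* The type to use when the address of an input address reload itself
     needs reloading.  */
  ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS);  /* RELOAD_FOR_INPADDR_ADDRESS */
#endif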
254 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
255 enum machine_mode, enum reload_type,
256 enum insn_code *, secondary_reload_info *);
257 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
258 int, unsigned int);
259 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
260 static void push_replacement (rtx *, int, enum machine_mode);
261 static void dup_replacements (rtx *, rtx *);
262 static void combine_reloads (void);
263 static int find_reusable_reload (rtx *, rtx, enum reg_class,
264 enum reload_type, int, int);
265 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
266 enum machine_mode, reg_class_t, int, int);
267 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
268 static struct decomposition decompose (rtx);
269 static int immune_p (rtx, rtx, struct decomposition);
270 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
271 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
272 int *);
273 static rtx make_memloc (rtx, int);
274 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
275 addr_space_t, rtx *);
276 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
277 int, enum reload_type, int, rtx);
278 static rtx subst_reg_equivs (rtx, rtx);
279 static rtx subst_indexed_address (rtx);
280 static void update_auto_inc_notes (rtx, int, int);
281 static int find_reloads_address_1 (enum machine_mode, rtx, int,
282 enum rtx_code, enum rtx_code, rtx *,
283 int, enum reload_type, int, rtx);
284 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
285 enum machine_mode, int,
286 enum reload_type, int);
287 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
288 int, rtx, int *);
289 static void copy_replacements_1 (rtx *, rtx *, int);
290 static int find_inc_amount (rtx, rtx);
291 static int refers_to_mem_for_reload_p (rtx);
292 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
293 rtx, rtx *);
295 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
296 list yet. */
298 static void
299 push_reg_equiv_alt_mem (int regno, rtx mem)
301 rtx it;
303 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
304 if (rtx_equal_p (XEXP (it, 0), mem))
305 return;
307 reg_equiv_alt_mem_list (regno)
308 = alloc_EXPR_LIST (REG_EQUIV, mem,
309 reg_equiv_alt_mem_list (regno));
312 /* Determine if any secondary reloads are needed for loading (if IN_P is
313 nonzero) or storing (if IN_P is zero) X to or from a reload register of
314 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
315 are needed, push them.
317 Return the reload number of the secondary reload we made, or -1 if
318 we didn't need one. *PICODE is set to the insn_code to use if we do
319 need a secondary reload. */
321 static int
322 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
323 enum reg_class reload_class,
324 enum machine_mode reload_mode, enum reload_type type,
325 enum insn_code *picode, secondary_reload_info *prev_sri)
327 enum reg_class rclass = NO_REGS;
328 enum reg_class scratch_class;
329 enum machine_mode mode = reload_mode;
330 enum insn_code icode = CODE_FOR_nothing;
331 enum insn_code t_icode = CODE_FOR_nothing;
332 enum reload_type secondary_type;
333 int s_reload, t_reload = -1;
334 const char *scratch_constraint;
335 char letter;
336 secondary_reload_info sri;
338 if (type == RELOAD_FOR_INPUT_ADDRESS
339 || type == RELOAD_FOR_OUTPUT_ADDRESS
340 || type == RELOAD_FOR_INPADDR_ADDRESS
341 || type == RELOAD_FOR_OUTADDR_ADDRESS)
342 secondary_type = type;
343 else
344 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
346 *picode = CODE_FOR_nothing;
348 /* If X is a paradoxical SUBREG, use the inner value to determine both the
349 mode and object being reloaded. */
350 if (paradoxical_subreg_p (x))
352 x = SUBREG_REG (x);
353 reload_mode = GET_MODE (x);
356 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
357 is still a pseudo-register by now, it *must* have an equivalent MEM
358 but we don't want to assume that), use that equivalent when seeing if
359 a secondary reload is needed since whether or not a reload is needed
360 might be sensitive to the form of the MEM. */
362 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
363 && reg_equiv_mem (REGNO (x)))
364 x = reg_equiv_mem (REGNO (x));
366 sri.icode = CODE_FOR_nothing;
367 sri.prev_sri = prev_sri;
368 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
369 reload_mode, &sri);
370 icode = (enum insn_code) sri.icode;
372 /* If we don't need any secondary registers, done. */
373 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
374 return -1;
376 if (rclass != NO_REGS)
377 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
378 reload_mode, type, &t_icode, &sri);
380 /* If we will be using an insn, the secondary reload is for a
381 scratch register. */
383 if (icode != CODE_FOR_nothing)
385 /* If IN_P is nonzero, the reload register will be the output in
386 operand 0. If IN_P is zero, the reload register will be the input
387 in operand 1. Outputs should have an initial "=", which we must
388 skip. */
390 /* ??? It would be useful to be able to handle only two, or more than
391 three, operands, but for now we can only handle the case of having
392 exactly three: output, input and one temp/scratch. */
393 gcc_assert (insn_data[(int) icode].n_operands == 3);
395 /* ??? We currently have no way to represent a reload that needs
396 an icode to reload from an intermediate tertiary reload register.
397 We should probably have a new field in struct reload to tag a
398 chain of scratch operand reloads onto. */
399 gcc_assert (rclass == NO_REGS);
401 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
402 gcc_assert (*scratch_constraint == '=');
403 scratch_constraint++;
404 if (*scratch_constraint == '&')
405 scratch_constraint++;
406 letter = *scratch_constraint;
407 scratch_class = (letter == 'r' ? GENERAL_REGS
408 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
409 scratch_constraint));
411 rclass = scratch_class;
412 mode = insn_data[(int) icode].operand[2].mode;
415 /* This case isn't valid, so fail. Reload is allowed to use the same
416 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
417 in the case of a secondary register, we actually need two different
418 registers for correct code. We fail here to prevent the possibility of
419 silently generating incorrect code later.
421 The convention is that secondary input reloads are valid only if the
422 secondary_class is different from class. If you have such a case, you
423 cannot use secondary reloads; you must work around the problem some
424 other way.
426 Allow this when a reload_in/out pattern is being used. I.e. assume
427 that the generated code handles this case. */
429 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
430 || t_icode != CODE_FOR_nothing);
432 /* See if we can reuse an existing secondary reload. */
433 for (s_reload = 0; s_reload < n_reloads; s_reload++)
434 if (rld[s_reload].secondary_p
435 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
436 || reg_class_subset_p (rld[s_reload].rclass, rclass))
437 && ((in_p && rld[s_reload].inmode == mode)
438 || (! in_p && rld[s_reload].outmode == mode))
439 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
440 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
441 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
442 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
443 && (small_register_class_p (rclass)
444 || targetm.small_register_classes_for_mode_p (VOIDmode))
445 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
446 opnum, rld[s_reload].opnum))
448 if (in_p)
449 rld[s_reload].inmode = mode;
450 if (! in_p)
451 rld[s_reload].outmode = mode;
453 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
454 rld[s_reload].rclass = rclass;
456 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
457 rld[s_reload].optional &= optional;
458 rld[s_reload].secondary_p = 1;
459 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
460 opnum, rld[s_reload].opnum))
461 rld[s_reload].when_needed = RELOAD_OTHER;
463 break;
466 if (s_reload == n_reloads)
468 #ifdef SECONDARY_MEMORY_NEEDED
469 /* If we need a memory location to copy between the two reload regs,
470 set it up now. Note that we do the input case before making
471 the reload and the output case after. This is due to the
472 way reloads are output. */
474 if (in_p && icode == CODE_FOR_nothing
475 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
477 get_secondary_mem (x, reload_mode, opnum, type);
479 /* We may have just added new reloads. Make sure we add
480 the new reload at the end. */
481 s_reload = n_reloads;
483 #endif
485 /* We need to make a new secondary reload for this register class. */
486 rld[s_reload].in = rld[s_reload].out = 0;
487 rld[s_reload].rclass = rclass;
489 rld[s_reload].inmode = in_p ? mode : VOIDmode;
490 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
491 rld[s_reload].reg_rtx = 0;
492 rld[s_reload].optional = optional;
493 rld[s_reload].inc = 0;
494 /* Maybe we could combine these, but it seems too tricky. */
495 rld[s_reload].nocombine = 1;
496 rld[s_reload].in_reg = 0;
497 rld[s_reload].out_reg = 0;
498 rld[s_reload].opnum = opnum;
499 rld[s_reload].when_needed = secondary_type;
500 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
501 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
502 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
503 rld[s_reload].secondary_out_icode
504 = ! in_p ? t_icode : CODE_FOR_nothing;
505 rld[s_reload].secondary_p = 1;
507 n_reloads++;
509 #ifdef SECONDARY_MEMORY_NEEDED
510 if (! in_p && icode == CODE_FOR_nothing
511 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
512 get_secondary_mem (x, mode, opnum, type);
513 #endif
516 *picode = icode;
517 return s_reload;
520 /* If a secondary reload is needed, return its class. If both an intermediate
521 register and a scratch register are needed, we return the class of the
522 intermediate register. */
523 reg_class_t
524 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
525 rtx x)
527 enum insn_code icode;
528 secondary_reload_info sri;
530 sri.icode = CODE_FOR_nothing;
531 sri.prev_sri = NULL;
532 rclass
533 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
534 icode = (enum insn_code) sri.icode;
536 /* If there are no secondary reloads at all, we return NO_REGS.
537 If an intermediate register is needed, we return its class. */
538 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
539 return rclass;
541 /* No intermediate register is needed, but we have a special reload
542 pattern, which we assume for now needs a scratch register. */
543 return scratch_reload_class (icode);
546 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
547 three operands, verify that operand 2 is an output operand, and return
548 its register class.
549 ??? We'd like to be able to handle any pattern with at least 2 operands,
550 for zero or more scratch registers, but that needs more infrastructure. */
551 enum reg_class
552 scratch_reload_class (enum insn_code icode)
554 const char *scratch_constraint;
555 char scratch_letter;
556 enum reg_class rclass;
558 gcc_assert (insn_data[(int) icode].n_operands == 3);
559 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
560 gcc_assert (*scratch_constraint == '=');
561 scratch_constraint++;
562 if (*scratch_constraint == '&')
563 scratch_constraint++;
564 scratch_letter = *scratch_constraint;
565 if (scratch_letter == 'r')
566 return GENERAL_REGS;
567 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
568 scratch_constraint);
569 gcc_assert (rclass != NO_REGS);
570 return rclass;
573 #ifdef SECONDARY_MEMORY_NEEDED
575 /* Return a memory location that will be used to copy X in mode MODE.
576 If we haven't already made a location for this mode in this insn,
577 call find_reloads_address on the location being returned. */
rtx
580 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
581 int opnum, enum reload_type type)
583 rtx loc;
584 int mem_valid;
586 /* By default, if MODE is narrower than a word, widen it to a word.
587 This is required because most machines that require these memory
588 locations do not support short loads and stores from all registers
589 (e.g., FP registers). */
591 #ifdef SECONDARY_MEMORY_NEEDED_MODE
592 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
593 #else
594 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
595 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
596 #endif
598 /* If we already have made a MEM for this operand in MODE, return it. */
599 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
600 return secondary_memlocs_elim[(int) mode][opnum];
602 /* If this is the first time we've tried to get a MEM for this mode,
603 allocate a new one. `something_changed' in reload will get set
604 by noticing that the frame size has changed. */
606 if (secondary_memlocs[(int) mode] == 0)
608 #ifdef SECONDARY_MEMORY_NEEDED_RTX
609 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
610 #else
611 secondary_memlocs[(int) mode]
612 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
613 #endif
616 /* Get a version of the address doing any eliminations needed. If that
617 didn't give us a new MEM, make a new one if it isn't valid. */
619 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
620 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
621 MEM_ADDR_SPACE (loc));
623 if (! mem_valid && loc == secondary_memlocs[(int) mode])
624 loc = copy_rtx (loc);
626 /* The only time the call below will do anything is if the stack
627 offset is too large. In that case IND_LEVELS doesn't matter, so we
628 can just pass a zero. Adjust the type to be the address of the
629 corresponding object. If the address was valid, save the eliminated
630 address. If it wasn't valid, we need to make a reload each time, so
631 don't save it. */
633 if (! mem_valid)
635 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
636 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
637 : RELOAD_OTHER);
639 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
640 opnum, type, 0, 0);
643 secondary_memlocs_elim[(int) mode][opnum] = loc;
644 if (secondary_memlocs_elim_used <= (int)mode)
645 secondary_memlocs_elim_used = (int)mode + 1;
646 return loc;
649 /* Clear any secondary memory locations we've made. */
651 void
652 clear_secondary_mem (void)
654 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
656 #endif /* SECONDARY_MEMORY_NEEDED */
659 /* Find the largest class which has at least one register valid in
660 mode INNER, and which for every such register, that register number
661 plus N is also valid in OUTER (if in range) and is cheap to move
662 into DEST_REGNO. Such a class must exist. */
664 static enum reg_class
665 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
666 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
667 unsigned int dest_regno ATTRIBUTE_UNUSED)
669 int best_cost = -1;
670 int rclass;
671 int regno;
672 enum reg_class best_class = NO_REGS;
673 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
674 unsigned int best_size = 0;
675 int cost;
677 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
679 int bad = 0;
680 int good = 0;
681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
682 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
684 if (HARD_REGNO_MODE_OK (regno, inner))
686 good = 1;
687 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
688 || ! HARD_REGNO_MODE_OK (regno + n, outer))
689 bad = 1;
693 if (bad || !good)
694 continue;
695 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
697 if ((reg_class_size[rclass] > best_size
698 && (best_cost < 0 || best_cost >= cost))
699 || best_cost > cost)
701 best_class = (enum reg_class) rclass;
702 best_size = reg_class_size[rclass];
703 best_cost = register_move_cost (outer, (enum reg_class) rclass,
704 dest_class);
708 gcc_assert (best_size != 0);
710 return best_class;
713 /* Return the number of a previously made reload that can be combined with
714 a new one, or n_reloads if none of the existing reloads can be used.
715 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
716 push_reload; they determine the kind of the new reload that we try to
717 combine. P_IN points to the corresponding value of IN, which can be
718 modified by this function.
719 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
721 static int
722 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
723 enum reload_type type, int opnum, int dont_share)
725 rtx in = *p_in;
726 int i;
727 /* We can't merge two reloads if the output of either one is
728 earlyclobbered. */
730 if (earlyclobber_operand_p (out))
731 return n_reloads;
733 /* We can use an existing reload if the class is right
734 and at least one of IN and OUT is a match
735 and the other is at worst neutral.
736 (A zero compared against anything is neutral.)
738 For targets with small register classes, don't use existing reloads
739 unless they are for the same thing since that can cause us to need
740 more reload registers than we otherwise would. */
742 for (i = 0; i < n_reloads; i++)
743 if ((reg_class_subset_p (rclass, rld[i].rclass)
744 || reg_class_subset_p (rld[i].rclass, rclass))
745 /* If the existing reload has a register, it must fit our class. */
746 && (rld[i].reg_rtx == 0
747 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
748 true_regnum (rld[i].reg_rtx)))
749 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
750 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
751 || (out != 0 && MATCHES (rld[i].out, out)
752 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
753 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
754 && (small_register_class_p (rclass)
755 || targetm.small_register_classes_for_mode_p (VOIDmode))
756 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
757 return i;
759 /* Reloading a plain reg for input can match a reload to postincrement
760 that reg, since the postincrement's value is the right value.
761 Likewise, it can match a preincrement reload, since we regard
762 the preincrementation as happening before any ref in this insn
763 to that register. */
764 for (i = 0; i < n_reloads; i++)
765 if ((reg_class_subset_p (rclass, rld[i].rclass)
766 || reg_class_subset_p (rld[i].rclass, rclass))
767 /* If the existing reload has a register, it must fit our
768 class. */
769 && (rld[i].reg_rtx == 0
770 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
771 true_regnum (rld[i].reg_rtx)))
772 && out == 0 && rld[i].out == 0 && rld[i].in != 0
773 && ((REG_P (in)
774 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
775 && MATCHES (XEXP (rld[i].in, 0), in))
776 || (REG_P (rld[i].in)
777 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
778 && MATCHES (XEXP (in, 0), rld[i].in)))
779 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
780 && (small_register_class_p (rclass)
781 || targetm.small_register_classes_for_mode_p (VOIDmode))
782 && MERGABLE_RELOADS (type, rld[i].when_needed,
783 opnum, rld[i].opnum))
785 /* Make sure reload_in ultimately has the increment,
786 not the plain register. */
787 if (REG_P (in))
788 *p_in = rld[i].in;
789 return i;
791 return n_reloads;
794 /* Return nonzero if X is a SUBREG which will require reloading of its
795 SUBREG_REG expression. */
797 static int
798 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
800 rtx inner;
802 /* Only SUBREGs are problematical. */
803 if (GET_CODE (x) != SUBREG)
804 return 0;
806 inner = SUBREG_REG (x);
808 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
809 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
810 return 1;
812 /* If INNER is not a hard register, then INNER will not need to
813 be reloaded. */
814 if (!REG_P (inner)
815 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
816 return 0;
818 /* If INNER is not ok for MODE, then INNER will need reloading. */
819 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
820 return 1;
822 /* If the outer part is a word or smaller, INNER is larger than a
823 word, and the number of regs for INNER is not the same as the
824 number of words in INNER, then INNER will need reloading. */
825 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
826 && output
827 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
828 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
829 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
832 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
833 requiring an extra reload register. The caller has already found that
834 IN contains some reference to REGNO, so check that we can produce the
835 new value in a single step. E.g. if we have
836 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
837 instruction that adds one to a register, this should succeed.
838 However, if we have something like
839 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
840 needs to be loaded into a register first, we need a separate reload
841 register.
842 Such PLUS reloads are generated by find_reloads_address_part.
843 The out-of-range PLUS expressions are usually introduced in the instruction
844 patterns by register elimination and by substituting pseudos without a home
845 with their function-invariant equivalences. */
846 static int
847 can_reload_into (rtx in, int regno, enum machine_mode mode)
849 rtx dst, test_insn;
850 int r = 0;
851 struct recog_data save_recog_data;
853 /* For matching constraints, we often get notional input reloads where
854 we want to use the original register as the reload register. I.e.
855 technically this is a non-optional input-output reload, but IN is
856 already a valid register, and has been chosen as the reload register.
857 Speed this up, since it trivially works. */
858 if (REG_P (in))
859 return 1;
861 /* To test MEMs properly, we'd have to take into account all the reloads
862 that are already scheduled, which can become quite complicated.
863 And since we've already handled address reloads for this MEM, it
864 should always succeed anyway. */
865 if (MEM_P (in))
866 return 1;
868 /* If we can make a simple SET insn that does the job, everything should
869 be fine. */
870 dst = gen_rtx_REG (mode, regno);
871 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
872 save_recog_data = recog_data;
873 if (recog_memoized (test_insn) >= 0)
875 extract_insn (test_insn);
876 r = constrain_operands (1);
878 recog_data = save_recog_data;
879 return r;
882 /* Record one reload that needs to be performed.
883 IN is an rtx saying where the data are to be found before this instruction.
884 OUT says where they must be stored after the instruction.
885 (IN is zero for data not read, and OUT is zero for data not written.)
886 INLOC and OUTLOC point to the places in the instructions where
887 IN and OUT were found.
888 If IN and OUT are both nonzero, it means the same register must be used
889 to reload both IN and OUT.
891 RCLASS is a register class required for the reloaded data.
892 INMODE is the machine mode that the instruction requires
893 for the reg that replaces IN and OUTMODE is likewise for OUT.
895 If IN is zero, then OUT's location and mode should be passed as
896 INLOC and INMODE.
898 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
900 OPTIONAL nonzero means this reload does not need to be performed:
901 it can be discarded if that is more convenient.
903 OPNUM and TYPE say what the purpose of this reload is.
905 The return value is the reload-number for this reload.
907 If both IN and OUT are nonzero, in some rare cases we might
908 want to make two separate reloads. (Actually we never do this now.)
909 Therefore, the reload-number for OUT is stored in
910 output_reloadnum when we return; the return value applies to IN.
911 Usually (presently always), when IN and OUT are nonzero,
912 the two reload-numbers are equal, but the caller should be careful to
913 distinguish them. */
int
916 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
917 enum reg_class rclass, enum machine_mode inmode,
918 enum machine_mode outmode, int strict_low, int optional,
919 int opnum, enum reload_type type)
921 int i;
922 int dont_share = 0;
923 int dont_remove_subreg = 0;
924 #ifdef LIMIT_RELOAD_CLASS
925 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
926 #endif
927 int secondary_in_reload = -1, secondary_out_reload = -1;
928 enum insn_code secondary_in_icode = CODE_FOR_nothing;
929 enum insn_code secondary_out_icode = CODE_FOR_nothing;
931 /* INMODE and/or OUTMODE could be VOIDmode if no mode
932 has been specified for the operand. In that case,
933 use the operand's mode as the mode to reload. */
934 if (inmode == VOIDmode && in != 0)
935 inmode = GET_MODE (in);
936 if (outmode == VOIDmode && out != 0)
937 outmode = GET_MODE (out);
939 /* If find_reloads and friends have so far failed to replace a pseudo
940 with its reg_equiv_constant, something went wrong
941 beforehand.
942 Note that the replacement can't simply be done here if we missed it earlier,
943 since the constant might need to be pushed into the literal pool
944 and the resulting memref would probably need further
945 reloading. */
946 if (in != 0 && REG_P (in))
948 int regno = REGNO (in);
950 gcc_assert (regno < FIRST_PSEUDO_REGISTER
951 || reg_renumber[regno] >= 0
952 || reg_equiv_constant (regno) == NULL_RTX);
955 /* reg_equiv_constant only contains constants which are obviously
956 not appropriate as destinations. So if we would need to replace
957 the destination pseudo with a constant, we are in real
958 trouble. */
959 if (out != 0 && REG_P (out))
961 int regno = REGNO (out);
963 gcc_assert (regno < FIRST_PSEUDO_REGISTER
964 || reg_renumber[regno] >= 0
965 || reg_equiv_constant (regno) == NULL_RTX);
968 /* If we have a read-write operand with an address side-effect,
969 change either IN or OUT so the side-effect happens only once. */
970 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
971 switch (GET_CODE (XEXP (in, 0)))
973 case POST_INC: case POST_DEC: case POST_MODIFY:
974 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
975 break;
977 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
978 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
979 break;
981 default:
982 break;
985 /* If we are reloading a (SUBREG constant ...), really reload just the
986 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
987 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
988 a pseudo and hence will become a MEM) with M1 wider than M2 and the
989 register is a pseudo, also reload the inside expression.
990 For machines that extend byte loads, do this for any SUBREG of a pseudo
991 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
992 M2 is an integral mode that gets extended when loaded.
993 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
994 either M1 is not valid for R or M2 is wider than a word but we only
995 need one word to store an M2-sized quantity in R.
996 (However, if OUT is nonzero, we need to reload the reg *and*
997 the subreg, so do nothing here, and let following statement handle it.)
999 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1000 we can't handle it here because CONST_INT does not indicate a mode.
1002 Similarly, we must reload the inside expression if we have a
1003 STRICT_LOW_PART (presumably, in == out in this case).
1005 Also reload the inner expression if it does not require a secondary
1006 reload but the SUBREG does.
1008 Finally, reload the inner expression if it is a register that is in
1009 the class whose registers cannot be referenced in a different size
1010 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1011 cannot reload just the inside since we might end up with the wrong
1012 register class. But if it is inside a STRICT_LOW_PART, we have
1013 no choice, so we hope we do get the right register class there. */
1015 if (in != 0 && GET_CODE (in) == SUBREG
1016 && (subreg_lowpart_p (in) || strict_low)
1017 #ifdef CANNOT_CHANGE_MODE_CLASS
1018 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1019 #endif
1020 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1021 && (CONSTANT_P (SUBREG_REG (in))
1022 || GET_CODE (SUBREG_REG (in)) == PLUS
1023 || strict_low
1024 || (((REG_P (SUBREG_REG (in))
1025 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1026 || MEM_P (SUBREG_REG (in)))
1027 && ((GET_MODE_PRECISION (inmode)
1028 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1029 #ifdef LOAD_EXTEND_OP
1030 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1031 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1032 <= UNITS_PER_WORD)
1033 && (GET_MODE_PRECISION (inmode)
1034 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1035 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1036 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1037 #endif
1038 #ifdef WORD_REGISTER_OPERATIONS
1039 || ((GET_MODE_PRECISION (inmode)
1040 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1041 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1042 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1043 / UNITS_PER_WORD)))
1044 #endif
1046 || (REG_P (SUBREG_REG (in))
1047 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1048 /* The case where out is nonzero
1049 is handled differently in the following statement. */
1050 && (out == 0 || subreg_lowpart_p (in))
1051 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1052 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1053 > UNITS_PER_WORD)
1054 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1055 / UNITS_PER_WORD)
1056 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1057 [GET_MODE (SUBREG_REG (in))]))
1058 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1059 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1060 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1061 SUBREG_REG (in))
1062 == NO_REGS))
1063 #ifdef CANNOT_CHANGE_MODE_CLASS
1064 || (REG_P (SUBREG_REG (in))
1065 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1066 && REG_CANNOT_CHANGE_MODE_P
1067 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1068 #endif
1071 #ifdef LIMIT_RELOAD_CLASS
1072 in_subreg_loc = inloc;
1073 #endif
1074 inloc = &SUBREG_REG (in);
1075 in = *inloc;
1076 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1077 if (MEM_P (in))
1078 /* This is supposed to happen only for paradoxical subregs made by
1079 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1080 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1081 #endif
1082 inmode = GET_MODE (in);
1085 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1086 either M1 is not valid for R or M2 is wider than a word but we only
1087 need one word to store an M2-sized quantity in R.
1089 However, we must reload the inner reg *as well as* the subreg in
1090 that case. */
1092 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1093 code above. This can happen if SUBREG_BYTE != 0. */
1095 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1097 enum reg_class in_class = rclass;
1099 if (REG_P (SUBREG_REG (in)))
1100 in_class
1101 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1102 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1103 GET_MODE (SUBREG_REG (in)),
1104 SUBREG_BYTE (in),
1105 GET_MODE (in)),
1106 REGNO (SUBREG_REG (in)));
1108 /* This relies on the fact that emit_reload_insns outputs the
1109 instructions for input reloads of type RELOAD_OTHER in the same
1110 order as the reloads. Thus if the outer reload is also of type
1111 RELOAD_OTHER, we are guaranteed that this inner reload will be
1112 output before the outer reload. */
1113 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1114 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1115 dont_remove_subreg = 1;
1118 /* Similarly for paradoxical and problematical SUBREGs on the output.
1119 Note that there is no reason we need worry about the previous value
1120 of SUBREG_REG (out); even if wider than out,
1121 storing in a subreg is entitled to clobber it all
1122 (except in the case of STRICT_LOW_PART,
1123 and in that case the constraint should label it input-output.) */
1124 if (out != 0 && GET_CODE (out) == SUBREG
1125 && (subreg_lowpart_p (out) || strict_low)
1126 #ifdef CANNOT_CHANGE_MODE_CLASS
1127 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1128 #endif
1129 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1130 && (CONSTANT_P (SUBREG_REG (out))
1131 || strict_low
1132 || (((REG_P (SUBREG_REG (out))
1133 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1134 || MEM_P (SUBREG_REG (out)))
1135 && ((GET_MODE_PRECISION (outmode)
1136 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1137 #ifdef WORD_REGISTER_OPERATIONS
1138 || ((GET_MODE_PRECISION (outmode)
1139 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1140 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1141 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1142 / UNITS_PER_WORD)))
1143 #endif
1145 || (REG_P (SUBREG_REG (out))
1146 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1147 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1148 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1149 > UNITS_PER_WORD)
1150 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1151 / UNITS_PER_WORD)
1152 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1153 [GET_MODE (SUBREG_REG (out))]))
1154 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1155 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1156 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1157 SUBREG_REG (out))
1158 == NO_REGS))
1159 #ifdef CANNOT_CHANGE_MODE_CLASS
1160 || (REG_P (SUBREG_REG (out))
1161 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1162 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1163 GET_MODE (SUBREG_REG (out)),
1164 outmode))
1165 #endif
1168 #ifdef LIMIT_RELOAD_CLASS
1169 out_subreg_loc = outloc;
1170 #endif
1171 outloc = &SUBREG_REG (out);
1172 out = *outloc;
1173 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1174 gcc_assert (!MEM_P (out)
1175 || GET_MODE_SIZE (GET_MODE (out))
1176 <= GET_MODE_SIZE (outmode));
1177 #endif
1178 outmode = GET_MODE (out);
1181 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1182 either M1 is not valid for R or M2 is wider than a word but we only
1183 need one word to store an M2-sized quantity in R.
1185 However, we must reload the inner reg *as well as* the subreg in
1186 that case. In this case, the inner reg is an in-out reload. */
1188 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1190 /* This relies on the fact that emit_reload_insns outputs the
1191 instructions for output reloads of type RELOAD_OTHER in reverse
1192 order of the reloads. Thus if the outer reload is also of type
1193 RELOAD_OTHER, we are guaranteed that this inner reload will be
1194 output after the outer reload. */
1195 dont_remove_subreg = 1;
1196 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1197 &SUBREG_REG (out),
1198 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1199 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1200 GET_MODE (SUBREG_REG (out)),
1201 SUBREG_BYTE (out),
1202 GET_MODE (out)),
1203 REGNO (SUBREG_REG (out))),
1204 VOIDmode, VOIDmode, 0, 0,
1205 opnum, RELOAD_OTHER);
1208 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1209 if (in != 0 && out != 0 && MEM_P (out)
1210 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1211 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1212 dont_share = 1;
1214 /* If IN is a SUBREG of a hard register, make a new REG. This
1215 simplifies some of the cases below. */
1217 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1218 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1219 && ! dont_remove_subreg)
1220 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1222 /* Similarly for OUT. */
1223 if (out != 0 && GET_CODE (out) == SUBREG
1224 && REG_P (SUBREG_REG (out))
1225 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1226 && ! dont_remove_subreg)
1227 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1229 /* Narrow down the class of register wanted if that is
1230 desirable on this machine for efficiency. */
1232 reg_class_t preferred_class = rclass;
1234 if (in != 0)
1235 preferred_class = targetm.preferred_reload_class (in, rclass);
1237 /* Output reloads may need analogous treatment, different in detail. */
1238 if (out != 0)
1239 preferred_class
1240 = targetm.preferred_output_reload_class (out, preferred_class);
1242 /* Discard what the target said if we cannot do it. */
1243 if (preferred_class != NO_REGS
1244 || (optional && type == RELOAD_FOR_OUTPUT))
1245 rclass = (enum reg_class) preferred_class;
1248 /* Make sure we use a class that can handle the actual pseudo
1249 inside any subreg. For example, on the 386, QImode regs
1250 can appear within SImode subregs. Although GENERAL_REGS
1251 can handle SImode, QImode needs a smaller class. */
1252 #ifdef LIMIT_RELOAD_CLASS
1253 if (in_subreg_loc)
1254 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1255 else if (in != 0 && GET_CODE (in) == SUBREG)
1256 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1258 if (out_subreg_loc)
1259 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1260 if (out != 0 && GET_CODE (out) == SUBREG)
1261 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1262 #endif
1264 /* Verify that this class is at least possible for the mode that
1265 is specified. */
1266 if (this_insn_is_asm)
1268 enum machine_mode mode;
1269 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1270 mode = inmode;
1271 else
1272 mode = outmode;
1273 if (mode == VOIDmode)
1275 error_for_asm (this_insn, "cannot reload integer constant "
1276 "operand in %<asm%>");
1277 mode = word_mode;
1278 if (in != 0)
1279 inmode = word_mode;
1280 if (out != 0)
1281 outmode = word_mode;
1283 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1284 if (HARD_REGNO_MODE_OK (i, mode)
1285 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1286 break;
1287 if (i == FIRST_PSEUDO_REGISTER)
1289 error_for_asm (this_insn, "impossible register constraint "
1290 "in %<asm%>");
1291 /* Avoid further trouble with this insn. */
1292 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1293 /* We used to continue here setting class to ALL_REGS, but it triggers
1294 a sanity check on i386 for:
1295 void foo(long double d)
1296 {
1297 asm("" :: "a" (d));
1298 }
1299 Returning zero here ought to be safe as we take care in
1300 find_reloads to not process the reloads when the instruction was
1301 replaced by USE. */
1303 return 0;
1307 /* Optional output reloads are always OK even if we have no register class,
1308 since the function of these reloads is only to have spill_reg_store etc.
1309 set, so that the storing insn can be deleted later. */
1310 gcc_assert (rclass != NO_REGS
1311 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1313 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1315 if (i == n_reloads)
1317 /* See if we need a secondary reload register to move between CLASS
1318 and IN or CLASS and OUT. Get the icode and push any required reloads
1319 needed for each of them if so. */
1321 if (in != 0)
1322 secondary_in_reload
1323 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1324 &secondary_in_icode, NULL);
1325 if (out != 0 && GET_CODE (out) != SCRATCH)
1326 secondary_out_reload
1327 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1328 type, &secondary_out_icode, NULL);
1330 /* We found no existing reload suitable for re-use.
1331 So add an additional reload. */
1333 #ifdef SECONDARY_MEMORY_NEEDED
1334 /* If a memory location is needed for the copy, make one. */
1335 if (in != 0
1336 && (REG_P (in)
1337 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1338 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1339 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1340 rclass, inmode))
1341 get_secondary_mem (in, inmode, opnum, type);
1342 #endif
1344 i = n_reloads;
1345 rld[i].in = in;
1346 rld[i].out = out;
1347 rld[i].rclass = rclass;
1348 rld[i].inmode = inmode;
1349 rld[i].outmode = outmode;
1350 rld[i].reg_rtx = 0;
1351 rld[i].optional = optional;
1352 rld[i].inc = 0;
1353 rld[i].nocombine = 0;
1354 rld[i].in_reg = inloc ? *inloc : 0;
1355 rld[i].out_reg = outloc ? *outloc : 0;
1356 rld[i].opnum = opnum;
1357 rld[i].when_needed = type;
1358 rld[i].secondary_in_reload = secondary_in_reload;
1359 rld[i].secondary_out_reload = secondary_out_reload;
1360 rld[i].secondary_in_icode = secondary_in_icode;
1361 rld[i].secondary_out_icode = secondary_out_icode;
1362 rld[i].secondary_p = 0;
1364 n_reloads++;
1366 #ifdef SECONDARY_MEMORY_NEEDED
1367 if (out != 0
1368 && (REG_P (out)
1369 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1370 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1371 && SECONDARY_MEMORY_NEEDED (rclass,
1372 REGNO_REG_CLASS (reg_or_subregno (out)),
1373 outmode))
1374 get_secondary_mem (out, outmode, opnum, type);
1375 #endif
1377 else
1379 /* We are reusing an existing reload,
1380 but we may have additional information for it.
1381 For example, we may now have both IN and OUT
1382 while the old one may have just one of them. */
1384 /* The modes can be different. If they are, we want to reload in
1385 the larger mode, so that the value is valid for both modes. */
1386 if (inmode != VOIDmode
1387 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1388 rld[i].inmode = inmode;
1389 if (outmode != VOIDmode
1390 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1391 rld[i].outmode = outmode;
1392 if (in != 0)
1394 rtx in_reg = inloc ? *inloc : 0;
1395 /* If we merge reloads for two distinct rtl expressions that
1396 are identical in content, there might be duplicate address
1397 reloads. Remove the extra set now, so that if we later find
1398 that we can inherit this reload, we can get rid of the
1399 address reloads altogether.
1401 Do not do this if both reloads are optional since the result
1402 would be an optional reload which could potentially leave
1403 unresolved address replacements.
1405 It is not sufficient to call transfer_replacements since
1406 choose_reload_regs will remove the replacements for address
1407 reloads of inherited reloads which results in the same
1408 problem. */
1409 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1410 && ! (rld[i].optional && optional))
1412 /* We must keep the address reload with the lower operand
1413 number alive. */
1414 if (opnum > rld[i].opnum)
1416 remove_address_replacements (in);
1417 in = rld[i].in;
1418 in_reg = rld[i].in_reg;
1420 else
1421 remove_address_replacements (rld[i].in);
1423 /* When emitting reloads we don't necessarily look at the in-
1424 and outmode, but also directly at the operands (in and out).
1425 So we can't simply overwrite them with whatever we have found
1426 for this (to-be-merged) reload; we have to "merge" that too.
1427 Reusing another reload already verified that we deal with the
1428 same operands, just possibly in different modes. So we
1429 overwrite the operands only when the new mode is larger.
1430 See also PR33613. */
1431 if (!rld[i].in
1432 || GET_MODE_SIZE (GET_MODE (in))
1433 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1434 rld[i].in = in;
1435 if (!rld[i].in_reg
1436 || (in_reg
1437 && GET_MODE_SIZE (GET_MODE (in_reg))
1438 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1439 rld[i].in_reg = in_reg;
1441 if (out != 0)
1443 if (!rld[i].out
1444 || (out
1445 && GET_MODE_SIZE (GET_MODE (out))
1446 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1447 rld[i].out = out;
1448 if (outloc
1449 && (!rld[i].out_reg
1450 || GET_MODE_SIZE (GET_MODE (*outloc))
1451 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1452 rld[i].out_reg = *outloc;
1454 if (reg_class_subset_p (rclass, rld[i].rclass))
1455 rld[i].rclass = rclass;
1456 rld[i].optional &= optional;
1457 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1458 opnum, rld[i].opnum))
1459 rld[i].when_needed = RELOAD_OTHER;
1460 rld[i].opnum = MIN (rld[i].opnum, opnum);
1463 /* If the ostensible rtx being reloaded differs from the rtx found
1464 in the location to substitute, this reload is not safe to combine
1465 because we cannot reliably tell whether it appears in the insn. */
1467 if (in != 0 && in != *inloc)
1468 rld[i].nocombine = 1;
1470 #if 0
1471 /* This was replaced by changes in find_reloads_address_1 and the new
1472 function inc_for_reload, which go with a new meaning of reload_inc. */
1474 /* If this is an IN/OUT reload in an insn that sets the CC,
1475 it must be for an autoincrement. It doesn't work to store
1476 the incremented value after the insn because that would clobber the CC.
1477 So we must do the increment of the value reloaded from,
1478 increment it, store it back, then decrement again. */
1479 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1481 out = 0;
1482 rld[i].out = 0;
1483 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1484 /* If we did not find a nonzero amount-to-increment-by,
1485 that contradicts the belief that IN is being incremented
1486 in an address in this insn. */
1487 gcc_assert (rld[i].inc != 0);
1489 #endif
1491 /* If we will replace IN and OUT with the reload-reg,
1492 record where they are located so that substitution need
1493 not do a tree walk. */
1495 if (replace_reloads)
1497 if (inloc != 0)
1499 struct replacement *r = &replacements[n_replacements++];
1500 r->what = i;
1501 r->where = inloc;
1502 r->mode = inmode;
1504 if (outloc != 0 && outloc != inloc)
1506 struct replacement *r = &replacements[n_replacements++];
1507 r->what = i;
1508 r->where = outloc;
1509 r->mode = outmode;
1513 /* If this reload is just being introduced and it has both
1514 an incoming quantity and an outgoing quantity that are
1515 supposed to be made to match, see if either one of the two
1516 can serve as the place to reload into.
1518 If one of them is acceptable, set rld[i].reg_rtx
1519 to that one. */
1521 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1523 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1524 inmode, outmode,
1525 rld[i].rclass, i,
1526 earlyclobber_operand_p (out));
1528 /* If the outgoing register already contains the same value
1529 as the incoming one, we can dispense with loading it.
1530 The easiest way to tell the caller that is to give a phony
1531 value for the incoming operand (same as outgoing one). */
1532 if (rld[i].reg_rtx == out
1533 && (REG_P (in) || CONSTANT_P (in))
1534 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1535 static_reload_reg_p, i, inmode))
1536 rld[i].in = out;
1539 /* If this is an input reload and the operand contains a register that
1540 dies in this insn and is used nowhere else, see if it is the right class
1541 to be used for this reload. Use it if so. (This occurs most commonly
1542 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1543 this if it is also an output reload that mentions the register unless
1544 the output is a SUBREG that clobbers an entire register.
1546 Note that the operand might be one of the spill regs, if it is a
1547 pseudo reg and we are in a block where spilling has not taken place.
1548 But if there is no spilling in this block, that is OK.
1549 An explicitly used hard reg cannot be a spill reg. */
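/* Roughly: look for a REG_DEAD note naming a hard register that is
   mentioned inside IN, is not otherwise referenced or set in the pattern,
   is not a fixed register, belongs to RCLASS, and can hold both INMODE
   and OUTMODE; if such a register is found, use it directly as the
   reload register.  */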
1551 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1553 rtx note;
1554 int regno;
1555 enum machine_mode rel_mode = inmode;
1557 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1558 rel_mode = outmode;
1560 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1561 if (REG_NOTE_KIND (note) == REG_DEAD
1562 && REG_P (XEXP (note, 0))
1563 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1564 && reg_mentioned_p (XEXP (note, 0), in)
1565 /* Check that a former pseudo is valid; see find_dummy_reload. */
1566 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1567 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1568 ORIGINAL_REGNO (XEXP (note, 0)))
1569 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1570 && ! refers_to_regno_for_reload_p (regno,
1571 end_hard_regno (rel_mode,
1572 regno),
1573 PATTERN (this_insn), inloc)
1574 /* If this is also an output reload, IN cannot be used as
1575 the reload register if it is set in this insn unless IN
1576 is also OUT. */
1577 && (out == 0 || in == out
1578 || ! hard_reg_set_here_p (regno,
1579 end_hard_regno (rel_mode, regno),
1580 PATTERN (this_insn)))
1581 /* ??? Why is this code so different from the previous?
1582 Is there any simple coherent way to describe the two together?
1583 What's going on here? */
1584 && (in != out
1585 || (GET_CODE (in) == SUBREG
1586 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1587 / UNITS_PER_WORD)
1588 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1589 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1590 /* Make sure the operand fits in the reg that dies. */
1591 && (GET_MODE_SIZE (rel_mode)
1592 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1593 && HARD_REGNO_MODE_OK (regno, inmode)
1594 && HARD_REGNO_MODE_OK (regno, outmode))
1596 unsigned int offs;
1597 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1598 hard_regno_nregs[regno][outmode]);
1600 for (offs = 0; offs < nregs; offs++)
1601 if (fixed_regs[regno + offs]
1602 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1603 regno + offs))
1604 break;
1606 if (offs == nregs
1607 && (! (refers_to_regno_for_reload_p
1608 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1609 || can_reload_into (in, regno, inmode)))
1611 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1612 break;
1617 if (out)
1618 output_reloadnum = i;
1620 return i;
1623 /* Record an additional place we must replace a value
1624 for which we have already recorded a reload.
1625 RELOADNUM is the value returned by push_reload
1626 when the reload was recorded.
1627 This is used in insn patterns that use match_dup. */
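/* For example, when a pattern contains (match_dup 0), the value reloaded
   for the original operand also appears at the duplicate's location; that
   extra location is recorded here under the same reload number so that
   subst_reloads will substitute the reload register in both places.  */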
1629 static void
1630 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1632 if (replace_reloads)
1634 struct replacement *r = &replacements[n_replacements++];
1635 r->what = reloadnum;
1636 r->where = loc;
1637 r->mode = mode;
1641 /* Duplicate any replacement we have recorded to apply at
1642 location ORIG_LOC to also be performed at DUP_LOC.
1643 This is used in insn patterns that use match_dup. */
1645 static void
1646 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1648 int i, n = n_replacements;
1650 for (i = 0; i < n; i++)
1652 struct replacement *r = &replacements[i];
1653 if (r->where == orig_loc)
1654 push_replacement (dup_loc, r->what, r->mode);
1658 /* Transfer all replacements that used to be in reload FROM to be in
1659 reload TO. */
1661 void
1662 transfer_replacements (int to, int from)
1664 int i;
1666 for (i = 0; i < n_replacements; i++)
1667 if (replacements[i].what == from)
1668 replacements[i].what = to;
1671 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1672 or a subpart of it. If we have any replacements registered for IN_RTX,
1673 cancel the reloads that were supposed to load them.
1674 Return nonzero if we canceled any reloads. */
1675 int
1676 remove_address_replacements (rtx in_rtx)
1678 int i, j;
1679 char reload_flags[MAX_RELOADS];
1680 int something_changed = 0;
1682 memset (reload_flags, 0, sizeof reload_flags);
1683 for (i = 0, j = 0; i < n_replacements; i++)
1685 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1686 reload_flags[replacements[i].what] |= 1;
1687 else
1689 replacements[j++] = replacements[i];
1690 reload_flags[replacements[i].what] |= 2;
1693 /* Note that the following store must be done before the recursive calls. */
1694 n_replacements = j;
1696 for (i = n_reloads - 1; i >= 0; i--)
1698 if (reload_flags[i] == 1)
1700 deallocate_reload_reg (i);
1701 remove_address_replacements (rld[i].in);
1702 rld[i].in = 0;
1703 something_changed = 1;
1706 return something_changed;
1709 /* If there is only one output reload, and it is not for an earlyclobber
1710 operand, try to combine it with a (logically unrelated) input reload
1711 to reduce the number of reload registers needed.
1713 This is safe if the input reload does not appear in
1714 the value being output-reloaded, because this implies
1715 it is not needed any more once the original insn completes.
1717 If that doesn't work, see if we can use any of the registers that
1718 die in this insn as a reload register. We can if it is of the right
1719 class and does not appear in the value being output-reloaded. */
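/* For example, given a three-operand add such as
   (set (reg:SI 120) (plus:SI (reg:SI 121) (const_int 4)))
   where neither pseudo received a hard register, the input reload for
   (reg:SI 121) and the output reload for (reg:SI 120) can usually share
   a single reload register, because the input value is dead once the
   addition has been performed.  */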
1721 static void
1722 combine_reloads (void)
1724 int i, regno;
1725 int output_reload = -1;
1726 int secondary_out = -1;
1727 rtx note;
1729 /* Find the output reload; return unless there is exactly one
1730 and that one is mandatory. */
1732 for (i = 0; i < n_reloads; i++)
1733 if (rld[i].out != 0)
1735 if (output_reload >= 0)
1736 return;
1737 output_reload = i;
1740 if (output_reload < 0 || rld[output_reload].optional)
1741 return;
1743 /* An input-output reload isn't combinable. */
1745 if (rld[output_reload].in != 0)
1746 return;
1748 /* If this reload is for an earlyclobber operand, we can't do anything. */
1749 if (earlyclobber_operand_p (rld[output_reload].out))
1750 return;
1752 /* If there is a reload for part of the address of this operand, we would
1753 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1754 its life to the point where doing this combine would not lower the
1755 number of spill registers needed. */
1756 for (i = 0; i < n_reloads; i++)
1757 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1758 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1759 && rld[i].opnum == rld[output_reload].opnum)
1760 return;
1762 /* Check each input reload; can we combine it? */
1764 for (i = 0; i < n_reloads; i++)
1765 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1766 /* Life span of this reload must not extend past main insn. */
1767 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1768 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1769 && rld[i].when_needed != RELOAD_OTHER
1770 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1771 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1772 [(int) rld[output_reload].outmode])
1773 && rld[i].inc == 0
1774 && rld[i].reg_rtx == 0
1775 #ifdef SECONDARY_MEMORY_NEEDED
1776 /* Don't combine two reloads with different secondary
1777 memory locations. */
1778 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1779 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1780 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1781 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1782 #endif
1783 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1784 ? (rld[i].rclass == rld[output_reload].rclass)
1785 : (reg_class_subset_p (rld[i].rclass,
1786 rld[output_reload].rclass)
1787 || reg_class_subset_p (rld[output_reload].rclass,
1788 rld[i].rclass)))
1789 && (MATCHES (rld[i].in, rld[output_reload].out)
1790 /* Args reversed because the first arg seems to be
1791 the one that we imagine being modified
1792 while the second is the one that might be affected. */
1793 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1794 rld[i].in)
1795 /* However, if the input is a register that appears inside
1796 the output, then we also can't share.
1797 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1798 If the same reload reg is used for both reg 69 and the
1799 result to be stored in memory, then that result
1800 will clobber the address of the memory ref. */
1801 && ! (REG_P (rld[i].in)
1802 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1803 rld[output_reload].out))))
1804 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1805 rld[i].when_needed != RELOAD_FOR_INPUT)
1806 && (reg_class_size[(int) rld[i].rclass]
1807 || targetm.small_register_classes_for_mode_p (VOIDmode))
1808 /* We will allow making things slightly worse by combining an
1809 input and an output, but no worse than that. */
1810 && (rld[i].when_needed == RELOAD_FOR_INPUT
1811 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1813 int j;
1815 /* We have found a reload to combine with! */
1816 rld[i].out = rld[output_reload].out;
1817 rld[i].out_reg = rld[output_reload].out_reg;
1818 rld[i].outmode = rld[output_reload].outmode;
1819 /* Mark the old output reload as inoperative. */
1820 rld[output_reload].out = 0;
1821 /* The combined reload is needed for the entire insn. */
1822 rld[i].when_needed = RELOAD_OTHER;
1823 /* If the output reload had a secondary reload, copy it. */
1824 if (rld[output_reload].secondary_out_reload != -1)
1826 rld[i].secondary_out_reload
1827 = rld[output_reload].secondary_out_reload;
1828 rld[i].secondary_out_icode
1829 = rld[output_reload].secondary_out_icode;
1832 #ifdef SECONDARY_MEMORY_NEEDED
1833 /* Copy any secondary MEM. */
1834 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1835 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1836 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1837 #endif
1838 /* If required, minimize the register class. */
1839 if (reg_class_subset_p (rld[output_reload].rclass,
1840 rld[i].rclass))
1841 rld[i].rclass = rld[output_reload].rclass;
1843 /* Transfer all replacements from the old reload to the combined. */
1844 for (j = 0; j < n_replacements; j++)
1845 if (replacements[j].what == output_reload)
1846 replacements[j].what = i;
1848 return;
1851 /* If this insn has only one operand that is modified or written (assumed
1852 to be the first), it must be the one corresponding to this reload. It
1853 is safe to use anything that dies in this insn for that output provided
1854 that it does not occur in the output (we already know it isn't an
1855 earlyclobber). If this is an asm insn, give up. */
1857 if (INSN_CODE (this_insn) == -1)
1858 return;
1860 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1861 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1862 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1863 return;
1865 /* See if some hard register that dies in this insn and is not used in
1866 the output is the right class. Only works if the register we pick
1867 up can fully hold our output reload. */
1868 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1869 if (REG_NOTE_KIND (note) == REG_DEAD
1870 && REG_P (XEXP (note, 0))
1871 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1872 rld[output_reload].out)
1873 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1874 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1875 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1876 regno)
1877 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1878 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1879 /* Ensure that a secondary or tertiary reload for this output
1880 won't want this register. */
1881 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1882 || (!(TEST_HARD_REG_BIT
1883 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1884 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1885 || !(TEST_HARD_REG_BIT
1886 (reg_class_contents[(int) rld[secondary_out].rclass],
1887 regno)))))
1888 && !fixed_regs[regno]
1889 /* Check that a former pseudo is valid; see find_dummy_reload. */
1890 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1891 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1892 ORIGINAL_REGNO (XEXP (note, 0)))
1893 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1895 rld[output_reload].reg_rtx
1896 = gen_rtx_REG (rld[output_reload].outmode, regno);
1897 return;
1901 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1902 See if one of IN and OUT is a register that may be used;
1903 this is desirable since a spill-register won't be needed.
1904 If so, return the register rtx that proves acceptable.
1906 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1907 RCLASS is the register class required for the reload.
1909 If FOR_REAL is >= 0, it is the number of the reload,
1910 and in some cases when it can be discovered that OUT doesn't need
1911 to be computed, clear out rld[FOR_REAL].out.
1913 If FOR_REAL is -1, this should not be done, because this call
1914 is just to see if a register can be found, not to find and install it.
1916 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1917 puts an additional constraint on being able to use IN for OUT since
1918 IN must not appear elsewhere in the insn (it is assumed that IN itself
1919 is safe from the earlyclobber). */
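/* For example, when operand 1 must match operand 0 in
   (set (reg:SI 3) (plus:SI (reg:SI 120) (const_int 1)))
   and pseudo 120 did not get a hard register, hard register 3 (the output)
   can typically serve as the reload register for the input as well,
   provided it is in RCLASS and not referenced elsewhere in the insn, so
   no separate spill register is needed.  */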
1921 static rtx
1922 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1923 enum machine_mode inmode, enum machine_mode outmode,
1924 reg_class_t rclass, int for_real, int earlyclobber)
1926 rtx in = real_in;
1927 rtx out = real_out;
1928 int in_offset = 0;
1929 int out_offset = 0;
1930 rtx value = 0;
1932 /* If operands exceed a word, we can't use either of them
1933 unless they have the same size. */
1934 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1935 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1936 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1937 return 0;
1939 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1940 respectively refers to a hard register. */
1942 /* Find the inside of any subregs. */
1943 while (GET_CODE (out) == SUBREG)
1945 if (REG_P (SUBREG_REG (out))
1946 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1947 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1948 GET_MODE (SUBREG_REG (out)),
1949 SUBREG_BYTE (out),
1950 GET_MODE (out));
1951 out = SUBREG_REG (out);
1953 while (GET_CODE (in) == SUBREG)
1955 if (REG_P (SUBREG_REG (in))
1956 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1957 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1958 GET_MODE (SUBREG_REG (in)),
1959 SUBREG_BYTE (in),
1960 GET_MODE (in));
1961 in = SUBREG_REG (in);
1964 /* Narrow down the reg class, the same way push_reload will;
1965 otherwise we might find a dummy now, but push_reload won't. */
1967 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1968 if (preferred_class != NO_REGS)
1969 rclass = (enum reg_class) preferred_class;
1972 /* See if OUT will do. */
1973 if (REG_P (out)
1974 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1976 unsigned int regno = REGNO (out) + out_offset;
1977 unsigned int nwords = hard_regno_nregs[regno][outmode];
1978 rtx saved_rtx;
1980 /* When we consider whether the insn uses OUT,
1981 ignore references within IN. They don't prevent us
1982 from copying IN into OUT, because those refs would
1983 move into the insn that reloads IN.
1985 However, we only ignore IN in its role as this reload.
1986 If the insn uses IN elsewhere and it contains OUT,
1987 that counts. We can't be sure it's the "same" operand
1988 so it might not go through this reload. */
1989 saved_rtx = *inloc;
1990 *inloc = const0_rtx;
1992 if (regno < FIRST_PSEUDO_REGISTER
1993 && HARD_REGNO_MODE_OK (regno, outmode)
1994 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1995 PATTERN (this_insn), outloc))
1997 unsigned int i;
1999 for (i = 0; i < nwords; i++)
2000 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2001 regno + i))
2002 break;
2004 if (i == nwords)
2006 if (REG_P (real_out))
2007 value = real_out;
2008 else
2009 value = gen_rtx_REG (outmode, regno);
2013 *inloc = saved_rtx;
2016 /* Consider using IN if OUT was not acceptable
2017 or if OUT dies in this insn (like the quotient in a divmod insn).
2018 We can't use IN unless it dies in this insn,
2019 which means we must know accurately which hard regs are live.
2020 Also, the result can't go in IN if IN is used within OUT,
2021 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2022 if (hard_regs_live_known
2023 && REG_P (in)
2024 && REGNO (in) < FIRST_PSEUDO_REGISTER
2025 && (value == 0
2026 || find_reg_note (this_insn, REG_UNUSED, real_out))
2027 && find_reg_note (this_insn, REG_DEAD, real_in)
2028 && !fixed_regs[REGNO (in)]
2029 && HARD_REGNO_MODE_OK (REGNO (in),
2030 /* The only case where out and real_out might
2031 have different modes is where real_out
2032 is a subreg, and in that case, out
2033 has a real mode. */
2034 (GET_MODE (out) != VOIDmode
2035 ? GET_MODE (out) : outmode))
2036 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2037 /* However only do this if we can be sure that this input
2038 operand doesn't correspond with an uninitialized pseudo.
2039 global can assign some hardreg to it that is the same as
2040 the one assigned to a different, also live pseudo (as it
2041 can ignore the conflict). We must never introduce writes
2042 to such hardregs, as they would clobber the other live
2043 pseudo. See PR 20973. */
2044 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2045 ORIGINAL_REGNO (in))
2046 /* Similarly, only do this if we can be sure that the death
2047 note is still valid. global can assign some hardreg to
2048 the pseudo referenced in the note and simultaneously a
2049 subword of this hardreg to a different, also live pseudo,
2050 because only another subword of the hardreg is actually
2051 used in the insn. This cannot happen if the pseudo has
2052 been assigned exactly one hardreg. See PR 33732. */
2053 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2055 unsigned int regno = REGNO (in) + in_offset;
2056 unsigned int nwords = hard_regno_nregs[regno][inmode];
2058 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2059 && ! hard_reg_set_here_p (regno, regno + nwords,
2060 PATTERN (this_insn))
2061 && (! earlyclobber
2062 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2063 PATTERN (this_insn), inloc)))
2065 unsigned int i;
2067 for (i = 0; i < nwords; i++)
2068 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2069 regno + i))
2070 break;
2072 if (i == nwords)
2074 /* If we were going to use OUT as the reload reg
2075 and changed our mind, it means OUT is a dummy that
2076 dies here. So don't bother copying value to it. */
2077 if (for_real >= 0 && value == real_out)
2078 rld[for_real].out = 0;
2079 if (REG_P (real_in))
2080 value = real_in;
2081 else
2082 value = gen_rtx_REG (inmode, regno);
2087 return value;
2090 /* This page contains subroutines used mainly for determining
2091 whether the IN or an OUT of a reload can serve as the
2092 reload register. */
2094 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2096 int
2097 earlyclobber_operand_p (rtx x)
2099 int i;
2101 for (i = 0; i < n_earlyclobbers; i++)
2102 if (reload_earlyclobbers[i] == x)
2103 return 1;
2105 return 0;
2108 /* Return 1 if expression X alters a hard reg in the range
2109 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2110 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2111 X should be the body of an instruction. */
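/* For example, if X is (set (reg:DI 2) (reg:DI 4)) on a target where
   DImode occupies two hard registers, registers 2 and 3 are altered, so
   any BEG_REGNO/END_REGNO range that overlaps [2, 4) makes this return 1.  */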
2113 static int
2114 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2116 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2118 rtx op0 = SET_DEST (x);
2120 while (GET_CODE (op0) == SUBREG)
2121 op0 = SUBREG_REG (op0);
2122 if (REG_P (op0))
2124 unsigned int r = REGNO (op0);
2126 /* See if this reg overlaps range under consideration. */
2127 if (r < end_regno
2128 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2129 return 1;
2132 else if (GET_CODE (x) == PARALLEL)
2134 int i = XVECLEN (x, 0) - 1;
2136 for (; i >= 0; i--)
2137 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2138 return 1;
2141 return 0;
2144 /* Return 1 if ADDR is a valid memory address for mode MODE
2145 in address space AS, and check that each pseudo reg has the
2146 proper kind of hard reg. */
2148 int
2149 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2150 rtx addr, addr_space_t as)
2152 #ifdef GO_IF_LEGITIMATE_ADDRESS
2153 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2154 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2155 return 0;
2157 win:
2158 return 1;
2159 #else
2160 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2161 #endif
2164 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2165 if they are the same hard reg, and has special hacks for
2166 autoincrement and autodecrement.
2167 This is specifically intended for find_reloads to use
2168 in determining whether two operands match.
2169 X is the operand whose number is the lower of the two.
2171 The value is 2 if Y contains a pre-increment that matches
2172 a non-incrementing address in X. */
2174 /* ??? To be completely correct, we should arrange to pass
2175 for X the output operand and for Y the input operand.
2176 For now, we assume that the output operand has the lower number
2177 because that is natural in (SET output (... input ...)). */
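/* For example, (reg:SI 4) and (subreg:SI (reg:DI 4) 0) are treated as
   matching here, since both refer to hard register 4, even though
   rtx_equal_p would consider them different.  */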
2179 int
2180 operands_match_p (rtx x, rtx y)
2182 int i;
2183 RTX_CODE code = GET_CODE (x);
2184 const char *fmt;
2185 int success_2;
2187 if (x == y)
2188 return 1;
2189 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2190 && (REG_P (y) || (GET_CODE (y) == SUBREG
2191 && REG_P (SUBREG_REG (y)))))
2193 int j;
2195 if (code == SUBREG)
2197 i = REGNO (SUBREG_REG (x));
2198 if (i >= FIRST_PSEUDO_REGISTER)
2199 goto slow;
2200 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2201 GET_MODE (SUBREG_REG (x)),
2202 SUBREG_BYTE (x),
2203 GET_MODE (x));
2205 else
2206 i = REGNO (x);
2208 if (GET_CODE (y) == SUBREG)
2210 j = REGNO (SUBREG_REG (y));
2211 if (j >= FIRST_PSEUDO_REGISTER)
2212 goto slow;
2213 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2214 GET_MODE (SUBREG_REG (y)),
2215 SUBREG_BYTE (y),
2216 GET_MODE (y));
2218 else
2219 j = REGNO (y);
2221 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2222 multiple hard register group of scalar integer registers, so that
2223 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2224 register. */
2225 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2226 && SCALAR_INT_MODE_P (GET_MODE (x))
2227 && i < FIRST_PSEUDO_REGISTER)
2228 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2229 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2230 && SCALAR_INT_MODE_P (GET_MODE (y))
2231 && j < FIRST_PSEUDO_REGISTER)
2232 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2234 return i == j;
2236 /* If two operands must match, because they are really a single
2237 operand of an assembler insn, then two postincrements are invalid
2238 because the assembler insn would increment only once.
2239 On the other hand, a postincrement matches ordinary indexing
2240 if the postincrement is the output operand. */
2241 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2242 return operands_match_p (XEXP (x, 0), y);
2243 /* Two preincrements are invalid
2244 because the assembler insn would increment only once.
2245 On the other hand, a preincrement matches ordinary indexing
2246 if the preincrement is the input operand.
2247 In this case, return 2, since some callers need to do special
2248 things when this happens. */
2249 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2250 || GET_CODE (y) == PRE_MODIFY)
2251 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2253 slow:
2255 /* Now we have disposed of all the cases in which different rtx codes
2256 can match. */
2257 if (code != GET_CODE (y))
2258 return 0;
2260 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2261 if (GET_MODE (x) != GET_MODE (y))
2262 return 0;
2264 /* MEMs referring to different address spaces are not equivalent. */
2265 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2266 return 0;
2268 switch (code)
2270 case CONST_INT:
2271 case CONST_DOUBLE:
2272 case CONST_FIXED:
2273 return 0;
2275 case LABEL_REF:
2276 return XEXP (x, 0) == XEXP (y, 0);
2277 case SYMBOL_REF:
2278 return XSTR (x, 0) == XSTR (y, 0);
2280 default:
2281 break;
2284 /* Compare the elements. If any pair of corresponding elements
2285 fails to match, return 0 for the whole thing. */
2287 success_2 = 0;
2288 fmt = GET_RTX_FORMAT (code);
2289 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2291 int val, j;
2292 switch (fmt[i])
2294 case 'w':
2295 if (XWINT (x, i) != XWINT (y, i))
2296 return 0;
2297 break;
2299 case 'i':
2300 if (XINT (x, i) != XINT (y, i))
2301 return 0;
2302 break;
2304 case 'e':
2305 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2306 if (val == 0)
2307 return 0;
2308 /* If any subexpression returns 2,
2309 we should return 2 if we are successful. */
2310 if (val == 2)
2311 success_2 = 1;
2312 break;
2314 case '0':
2315 break;
2317 case 'E':
2318 if (XVECLEN (x, i) != XVECLEN (y, i))
2319 return 0;
2320 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2322 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2323 if (val == 0)
2324 return 0;
2325 if (val == 2)
2326 success_2 = 1;
2328 break;
2330 /* It is believed that rtx's at this level will never
2331 contain anything but integers and other rtx's,
2332 except within LABEL_REFs and SYMBOL_REFs. */
2333 default:
2334 gcc_unreachable ();
2337 return 1 + success_2;
2340 /* Describe the range of registers or memory referenced by X.
2341 If X is a register, set REG_FLAG and put the first register
2342 number into START and the last plus one into END.
2343 If X is a memory reference, put a base address into BASE
2344 and a range of integer offsets into START and END.
2345 If X is pushing on the stack, we can assume it causes no trouble,
2346 so we set the SAFE field. */
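/* For example, (mem:SI (plus:SI (reg:SI 1) (const_int 8))) decomposes
   into base == (reg:SI 1), start == 8 and end == 12 (8 plus the four
   bytes of SImode), while a bare hard register decomposes with REG_FLAG
   set and its hard register number range in START/END.  */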
2348 static struct decomposition
2349 decompose (rtx x)
2351 struct decomposition val;
2352 int all_const = 0;
2354 memset (&val, 0, sizeof (val));
2356 switch (GET_CODE (x))
2358 case MEM:
2360 rtx base = NULL_RTX, offset = 0;
2361 rtx addr = XEXP (x, 0);
2363 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2364 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2366 val.base = XEXP (addr, 0);
2367 val.start = -GET_MODE_SIZE (GET_MODE (x));
2368 val.end = GET_MODE_SIZE (GET_MODE (x));
2369 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2370 return val;
2373 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2375 if (GET_CODE (XEXP (addr, 1)) == PLUS
2376 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2377 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2379 val.base = XEXP (addr, 0);
2380 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2381 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2382 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2383 return val;
2387 if (GET_CODE (addr) == CONST)
2389 addr = XEXP (addr, 0);
2390 all_const = 1;
2392 if (GET_CODE (addr) == PLUS)
2394 if (CONSTANT_P (XEXP (addr, 0)))
2396 base = XEXP (addr, 1);
2397 offset = XEXP (addr, 0);
2399 else if (CONSTANT_P (XEXP (addr, 1)))
2401 base = XEXP (addr, 0);
2402 offset = XEXP (addr, 1);
2406 if (offset == 0)
2408 base = addr;
2409 offset = const0_rtx;
2411 if (GET_CODE (offset) == CONST)
2412 offset = XEXP (offset, 0);
2413 if (GET_CODE (offset) == PLUS)
2415 if (CONST_INT_P (XEXP (offset, 0)))
2417 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2418 offset = XEXP (offset, 0);
2420 else if (CONST_INT_P (XEXP (offset, 1)))
2422 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2423 offset = XEXP (offset, 1);
2425 else
2427 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2428 offset = const0_rtx;
2431 else if (!CONST_INT_P (offset))
2433 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2434 offset = const0_rtx;
2437 if (all_const && GET_CODE (base) == PLUS)
2438 base = gen_rtx_CONST (GET_MODE (base), base);
2440 gcc_assert (CONST_INT_P (offset));
2442 val.start = INTVAL (offset);
2443 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2444 val.base = base;
2446 break;
2448 case REG:
2449 val.reg_flag = 1;
2450 val.start = true_regnum (x);
2451 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2453 /* A pseudo with no hard reg. */
2454 val.start = REGNO (x);
2455 val.end = val.start + 1;
2457 else
2458 /* A hard reg. */
2459 val.end = end_hard_regno (GET_MODE (x), val.start);
2460 break;
2462 case SUBREG:
2463 if (!REG_P (SUBREG_REG (x)))
2464 /* This could be more precise, but it's good enough. */
2465 return decompose (SUBREG_REG (x));
2466 val.reg_flag = 1;
2467 val.start = true_regnum (x);
2468 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2469 return decompose (SUBREG_REG (x));
2470 else
2471 /* A hard reg. */
2472 val.end = val.start + subreg_nregs (x);
2473 break;
2475 case SCRATCH:
2476 /* This hasn't been assigned yet, so it can't conflict yet. */
2477 val.safe = 1;
2478 break;
2480 default:
2481 gcc_assert (CONSTANT_P (x));
2482 val.safe = 1;
2483 break;
2485 return val;
2488 /* Return 1 if altering Y will not modify the value of X.
2489 Y is also described by YDATA, which should be decompose (Y). */
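/* For example, two frame slots such as
   (mem:SI (plus:SI (reg:SI 6) (const_int -8))) and
   (mem:SI (plus:SI (reg:SI 6) (const_int -16))) have equal bases but
   disjoint offset ranges, so altering either one cannot modify the
   other.  */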
2491 static int
2492 immune_p (rtx x, rtx y, struct decomposition ydata)
2494 struct decomposition xdata;
2496 if (ydata.reg_flag)
2497 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2498 if (ydata.safe)
2499 return 1;
2501 gcc_assert (MEM_P (y));
2502 /* If Y is memory and X is not, Y can't affect X. */
2503 if (!MEM_P (x))
2504 return 1;
2506 xdata = decompose (x);
2508 if (! rtx_equal_p (xdata.base, ydata.base))
2510 /* If bases are distinct symbolic constants, there is no overlap. */
2511 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2512 return 1;
2513 /* Constants and stack slots never overlap. */
2514 if (CONSTANT_P (xdata.base)
2515 && (ydata.base == frame_pointer_rtx
2516 || ydata.base == hard_frame_pointer_rtx
2517 || ydata.base == stack_pointer_rtx))
2518 return 1;
2519 if (CONSTANT_P (ydata.base)
2520 && (xdata.base == frame_pointer_rtx
2521 || xdata.base == hard_frame_pointer_rtx
2522 || xdata.base == stack_pointer_rtx))
2523 return 1;
2524 /* If either base is variable, we don't know anything. */
2525 return 0;
2528 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2531 /* Similar, but calls decompose. */
2533 int
2534 safe_from_earlyclobber (rtx op, rtx clobber)
2536 struct decomposition early_data;
2538 early_data = decompose (clobber);
2539 return immune_p (op, clobber, early_data);
2542 /* Main entry point of this file: search the body of INSN
2543 for values that need reloading and record them with push_reload.
2544 REPLACE nonzero means record also where the values occur
2545 so that subst_reloads can be used.
2547 IND_LEVELS says how many levels of indirection are supported by this
2548 machine; a value of zero means that a memory reference is not a valid
2549 memory address.
2551 LIVE_KNOWN says we have valid information about which hard
2552 regs are live at each point in the program; this is true when
2553 we are called from global_alloc but false when stupid register
2554 allocation has been done.
2556 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2557 which is nonnegative if the reg has been commandeered for reloading into.
2558 It is copied into STATIC_RELOAD_REG_P and referenced from there
2559 by various subroutines.
2561 Return TRUE if some operands need to be changed, because of swapping
2562 commutative operands, reg_equiv_address substitution, or whatever. */
2564 int
2565 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2566 short *reload_reg_p)
2568 int insn_code_number;
2569 int i, j;
2570 int noperands;
2571 /* These start out as the constraints for the insn
2572 and they are chewed up as we consider alternatives. */
2573 const char *constraints[MAX_RECOG_OPERANDS];
2574 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2575 a register. */
2576 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2577 char pref_or_nothing[MAX_RECOG_OPERANDS];
2578 /* Nonzero for a MEM operand whose entire address needs a reload.
2579 May be -1 to indicate the entire address may or may not need a reload. */
2580 int address_reloaded[MAX_RECOG_OPERANDS];
2581 /* Nonzero for an address operand that needs to be completely reloaded.
2582 May be -1 to indicate the entire operand may or may not need a reload. */
2583 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2584 /* Value of enum reload_type to use for operand. */
2585 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2586 /* Value of enum reload_type to use within address of operand. */
2587 enum reload_type address_type[MAX_RECOG_OPERANDS];
2588 /* Save the usage of each operand. */
2589 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2590 int no_input_reloads = 0, no_output_reloads = 0;
2591 int n_alternatives;
2592 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2593 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2594 char this_alternative_win[MAX_RECOG_OPERANDS];
2595 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2596 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2597 int this_alternative_matches[MAX_RECOG_OPERANDS];
2598 int swapped;
2599 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2600 int this_alternative_number;
2601 int goal_alternative_number = 0;
2602 int operand_reloadnum[MAX_RECOG_OPERANDS];
2603 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2604 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2605 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2606 char goal_alternative_win[MAX_RECOG_OPERANDS];
2607 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2608 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2609 int goal_alternative_swapped;
2610 int best;
2611 int commutative;
2612 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2613 rtx substed_operand[MAX_RECOG_OPERANDS];
2614 rtx body = PATTERN (insn);
2615 rtx set = single_set (insn);
2616 int goal_earlyclobber = 0, this_earlyclobber;
2617 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2618 int retval = 0;
2620 this_insn = insn;
2621 n_reloads = 0;
2622 n_replacements = 0;
2623 n_earlyclobbers = 0;
2624 replace_reloads = replace;
2625 hard_regs_live_known = live_known;
2626 static_reload_reg_p = reload_reg_p;
2628 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2629 neither are insns that SET cc0. Insns that use CC0 are not allowed
2630 to have any input reloads. */
2631 if (JUMP_P (insn) || CALL_P (insn))
2632 no_output_reloads = 1;
2634 #ifdef HAVE_cc0
2635 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2636 no_input_reloads = 1;
2637 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2638 no_output_reloads = 1;
2639 #endif
2641 #ifdef SECONDARY_MEMORY_NEEDED
2642 /* The eliminated forms of any secondary memory locations are per-insn, so
2643 clear them out here. */
2645 if (secondary_memlocs_elim_used)
2647 memset (secondary_memlocs_elim, 0,
2648 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2649 secondary_memlocs_elim_used = 0;
2651 #endif
2653 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2654 is cheap to move between them. If it is not, there may not be an insn
2655 to do the copy, so we may need a reload. */
2656 if (GET_CODE (body) == SET
2657 && REG_P (SET_DEST (body))
2658 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2659 && REG_P (SET_SRC (body))
2660 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2661 && register_move_cost (GET_MODE (SET_SRC (body)),
2662 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2663 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2664 return 0;
2666 extract_insn (insn);
2668 noperands = reload_n_operands = recog_data.n_operands;
2669 n_alternatives = recog_data.n_alternatives;
2671 /* Just return "no reloads" if insn has no operands with constraints. */
2672 if (noperands == 0 || n_alternatives == 0)
2673 return 0;
2675 insn_code_number = INSN_CODE (insn);
2676 this_insn_is_asm = insn_code_number < 0;
2678 memcpy (operand_mode, recog_data.operand_mode,
2679 noperands * sizeof (enum machine_mode));
2680 memcpy (constraints, recog_data.constraints,
2681 noperands * sizeof (const char *));
2683 commutative = -1;
2685 /* If we will need to know, later, whether some pair of operands
2686 are the same, we must compare them now and save the result.
2687 Reloading the base and index registers will clobber them
2688 and afterward they will fail to match. */
2690 for (i = 0; i < noperands; i++)
2692 const char *p;
2693 int c;
2694 char *end;
2696 substed_operand[i] = recog_data.operand[i];
2697 p = constraints[i];
2699 modified[i] = RELOAD_READ;
2701 /* Scan this operand's constraint to see if it is an output operand,
2702 an in-out operand, is commutative, or should match another. */
2704 while ((c = *p))
2706 p += CONSTRAINT_LEN (c, p);
2707 switch (c)
2709 case '=':
2710 modified[i] = RELOAD_WRITE;
2711 break;
2712 case '+':
2713 modified[i] = RELOAD_READ_WRITE;
2714 break;
2715 case '%':
2717 /* The last operand should not be marked commutative. */
2718 gcc_assert (i != noperands - 1);
2720 /* We currently only support one commutative pair of
2721 operands. Some existing asm code currently uses more
2722 than one pair. Previously, that would usually work,
2723 but sometimes it would crash the compiler. We
2724 continue supporting that case as well as we can by
2725 silently ignoring all but the first pair. In the
2726 future we may handle it correctly. */
2727 if (commutative < 0)
2728 commutative = i;
2729 else
2730 gcc_assert (this_insn_is_asm);
2732 break;
2733 /* Use of ISDIGIT is tempting here, but it may get expensive because
2734 of locale support we don't want. */
2735 case '0': case '1': case '2': case '3': case '4':
2736 case '5': case '6': case '7': case '8': case '9':
2738 c = strtoul (p - 1, &end, 10);
2739 p = end;
2741 operands_match[c][i]
2742 = operands_match_p (recog_data.operand[c],
2743 recog_data.operand[i]);
2745 /* An operand may not match itself. */
2746 gcc_assert (c != i);
2748 /* If C can be commuted with C+1, and C might need to match I,
2749 then C+1 might also need to match I. */
2750 if (commutative >= 0)
2752 if (c == commutative || c == commutative + 1)
2754 int other = c + (c == commutative ? 1 : -1);
2755 operands_match[other][i]
2756 = operands_match_p (recog_data.operand[other],
2757 recog_data.operand[i]);
2759 if (i == commutative || i == commutative + 1)
2761 int other = i + (i == commutative ? 1 : -1);
2762 operands_match[c][other]
2763 = operands_match_p (recog_data.operand[c],
2764 recog_data.operand[other]);
2766 /* Note that C is supposed to be less than I.
2767 No need to consider altering both C and I because in
2768 that case we would alter one into the other. */
2775 /* Examine each operand that is a memory reference or memory address
2776 and reload parts of the addresses into index registers.
2777 Also here any references to pseudo regs that didn't get hard regs
2778 but are equivalent to constants get replaced in the insn itself
2779 with those constants. Nobody will ever see them again.
2781 Finally, set up the preferred classes of each operand. */
2783 for (i = 0; i < noperands; i++)
2785 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2787 address_reloaded[i] = 0;
2788 address_operand_reloaded[i] = 0;
2789 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2790 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2791 : RELOAD_OTHER);
2792 address_type[i]
2793 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2794 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2795 : RELOAD_OTHER);
2797 if (*constraints[i] == 0)
2798 /* Ignore things like match_operator operands. */
2800 else if (constraints[i][0] == 'p'
2801 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2803 address_operand_reloaded[i]
2804 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2805 recog_data.operand[i],
2806 recog_data.operand_loc[i],
2807 i, operand_type[i], ind_levels, insn);
2809 /* If we now have a simple operand where we used to have a
2810 PLUS or MULT, re-recognize and try again. */
2811 if ((OBJECT_P (*recog_data.operand_loc[i])
2812 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2813 && (GET_CODE (recog_data.operand[i]) == MULT
2814 || GET_CODE (recog_data.operand[i]) == PLUS))
2816 INSN_CODE (insn) = -1;
2817 retval = find_reloads (insn, replace, ind_levels, live_known,
2818 reload_reg_p);
2819 return retval;
2822 recog_data.operand[i] = *recog_data.operand_loc[i];
2823 substed_operand[i] = recog_data.operand[i];
2825 /* Address operands are reloaded in their existing mode,
2826 no matter what is specified in the machine description. */
2827 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2829 else if (code == MEM)
2831 address_reloaded[i]
2832 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2833 recog_data.operand_loc[i],
2834 XEXP (recog_data.operand[i], 0),
2835 &XEXP (recog_data.operand[i], 0),
2836 i, address_type[i], ind_levels, insn);
2837 recog_data.operand[i] = *recog_data.operand_loc[i];
2838 substed_operand[i] = recog_data.operand[i];
2840 else if (code == SUBREG)
2842 rtx reg = SUBREG_REG (recog_data.operand[i]);
2843 rtx op
2844 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2845 ind_levels,
2846 set != 0
2847 && &SET_DEST (set) == recog_data.operand_loc[i],
2848 insn,
2849 &address_reloaded[i]);
2851 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2852 that didn't get a hard register, emit a USE with a REG_EQUAL
2853 note in front so that we might inherit a previous, possibly
2854 wider reload. */
2856 if (replace
2857 && MEM_P (op)
2858 && REG_P (reg)
2859 && (GET_MODE_SIZE (GET_MODE (reg))
2860 >= GET_MODE_SIZE (GET_MODE (op)))
2861 && reg_equiv_constant (REGNO (reg)) == 0)
2862 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2863 insn),
2864 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2866 substed_operand[i] = recog_data.operand[i] = op;
2868 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2869 /* We can get a PLUS as an "operand" as a result of register
2870 elimination. See eliminate_regs and gen_reload. We handle
2871 a unary operator by reloading the operand. */
2872 substed_operand[i] = recog_data.operand[i]
2873 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2874 ind_levels, 0, insn,
2875 &address_reloaded[i]);
2876 else if (code == REG)
2878 /* This is equivalent to calling find_reloads_toplev.
2879 The code is duplicated for speed.
2880 When we find a pseudo always equivalent to a constant,
2881 we replace it by the constant. We must be sure, however,
2882 that we don't try to replace it in the insn in which it
2883 is being set. */
2884 int regno = REGNO (recog_data.operand[i]);
2885 if (reg_equiv_constant (regno) != 0
2886 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2888 /* Record the existing mode so that the check if constants are
2889 allowed will work when operand_mode isn't specified. */
2891 if (operand_mode[i] == VOIDmode)
2892 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2894 substed_operand[i] = recog_data.operand[i]
2895 = reg_equiv_constant (regno);
2897 if (reg_equiv_memory_loc (regno) != 0
2898 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2899 /* We need not give a valid is_set_dest argument since the case
2900 of a constant equivalence was checked above. */
2901 substed_operand[i] = recog_data.operand[i]
2902 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2903 ind_levels, 0, insn,
2904 &address_reloaded[i]);
2906 /* If the operand is still a register (we didn't replace it with an
2907 equivalent), get the preferred class to reload it into. */
2908 code = GET_CODE (recog_data.operand[i]);
2909 preferred_class[i]
2910 = ((code == REG && REGNO (recog_data.operand[i])
2911 >= FIRST_PSEUDO_REGISTER)
2912 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2913 : NO_REGS);
2914 pref_or_nothing[i]
2915 = (code == REG
2916 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2917 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2920 /* If this is simply a copy from operand 1 to operand 0, merge the
2921 preferred classes for the operands. */
2922 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2923 && recog_data.operand[1] == SET_SRC (set))
2925 preferred_class[0] = preferred_class[1]
2926 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2927 pref_or_nothing[0] |= pref_or_nothing[1];
2928 pref_or_nothing[1] |= pref_or_nothing[0];
2931 /* Now see what we need for pseudo-regs that didn't get hard regs
2932 or got the wrong kind of hard reg. For this, we must consider
2933 all the operands together against the register constraints. */
2935 best = MAX_RECOG_OPERANDS * 2 + 600;
2937 swapped = 0;
2938 goal_alternative_swapped = 0;
2939 try_swapped:
2941 /* The constraints are made of several alternatives.
2942 Each operand's constraint looks like foo,bar,... with commas
2943 separating the alternatives. The first alternatives for all
2944 operands go together, the second alternatives go together, etc.
2946 First loop over alternatives. */
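/* For example, if operand 0 has the constraint "r,m" and operand 1 has
   "m,r", alternative 0 asks for (register, memory) and alternative 1
   for (memory, register); each alternative is judged as a whole below.  */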
2948 for (this_alternative_number = 0;
2949 this_alternative_number < n_alternatives;
2950 this_alternative_number++)
2952 /* Loop over operands for one constraint alternative. */
2953 /* LOSERS counts those that don't fit this alternative
2954 and would require loading. */
2955 int losers = 0;
2956 /* BAD is set to 1 if some operand can't fit this alternative
2957 even after reloading. */
2958 int bad = 0;
2959 /* REJECT is a count of how undesirable this alternative says it is
2960 if any reloading is required. If the alternative matches exactly
2961 then REJECT is ignored, but otherwise it gets this much
2962 counted against it in addition to the reloading needed. Each
2963 ? counts three times here since we want the disparagement caused by
2964 a bad register class to count only 1/3 as much. */
2965 int reject = 0;
2967 if (!recog_data.alternative_enabled_p[this_alternative_number])
2969 int i;
2971 for (i = 0; i < recog_data.n_operands; i++)
2972 constraints[i] = skip_alternative (constraints[i]);
2974 continue;
2977 this_earlyclobber = 0;
2979 for (i = 0; i < noperands; i++)
2981 const char *p = constraints[i];
2982 char *end;
2983 int len;
2984 int win = 0;
2985 int did_match = 0;
2986 /* 0 => this operand can be reloaded somehow for this alternative. */
2987 int badop = 1;
2988 /* 0 => this operand can be reloaded if the alternative allows regs. */
2989 int winreg = 0;
2990 int c;
2991 int m;
2992 rtx operand = recog_data.operand[i];
2993 int offset = 0;
2994 /* Nonzero means this is a MEM that must be reloaded into a reg
2995 regardless of what the constraint says. */
2996 int force_reload = 0;
2997 int offmemok = 0;
2998 /* Nonzero if a constant forced into memory would be OK for this
2999 operand. */
3000 int constmemok = 0;
3001 int earlyclobber = 0;
3003 /* If the predicate accepts a unary operator, it means that
3004 we need to reload the operand, but do not do this for
3005 match_operator and friends. */
3006 if (UNARY_P (operand) && *p != 0)
3007 operand = XEXP (operand, 0);
3009 /* If the operand is a SUBREG, extract
3010 the REG or MEM (or maybe even a constant) within.
3011 (Constants can occur as a result of reg_equiv_constant.) */
3013 while (GET_CODE (operand) == SUBREG)
3015 /* Offset only matters when operand is a REG and
3016 it is a hard reg. This is because it is passed
3017 to reg_fits_class_p if it is a REG and all pseudos
3018 return 0 from that function. */
3019 if (REG_P (SUBREG_REG (operand))
3020 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3022 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3023 GET_MODE (SUBREG_REG (operand)),
3024 SUBREG_BYTE (operand),
3025 GET_MODE (operand)) < 0)
3026 force_reload = 1;
3027 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3028 GET_MODE (SUBREG_REG (operand)),
3029 SUBREG_BYTE (operand),
3030 GET_MODE (operand));
3032 operand = SUBREG_REG (operand);
3033 /* Force reload if this is a constant or PLUS or if there may
3034 be a problem accessing OPERAND in the outer mode. */
3035 if (CONSTANT_P (operand)
3036 || GET_CODE (operand) == PLUS
3037 /* We must force a reload of paradoxical SUBREGs
3038 of a MEM because the alignment of the inner value
3039 may not be enough to do the outer reference. On
3040 big-endian machines, it may also reference outside
3041 the object.
3043 On machines that extend byte operations and we have a
3044 SUBREG where both the inner and outer modes are no wider
3045 than a word and the inner mode is narrower, is integral,
3046 and gets extended when loaded from memory, combine.c has
3047 made assumptions about the behavior of the machine in such
3048 register access. If the data is, in fact, in memory we
3049 must always load using the size assumed to be in the
3050 register and let the insn do the different-sized
3051 accesses.
3053 This is doubly true if WORD_REGISTER_OPERATIONS. In
3054 this case eliminate_regs has left non-paradoxical
3055 subregs for push_reload to see. Make sure it does
3056 by forcing the reload.
3058 ??? When is it right at this stage to have a subreg
3059 of a mem that is _not_ to be handled specially? IMO
3060 those should have been reduced to just a mem. */
3061 || ((MEM_P (operand)
3062 || (REG_P (operand)
3063 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3064 #ifndef WORD_REGISTER_OPERATIONS
3065 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3066 < BIGGEST_ALIGNMENT)
3067 && (GET_MODE_SIZE (operand_mode[i])
3068 > GET_MODE_SIZE (GET_MODE (operand))))
3069 || BYTES_BIG_ENDIAN
3070 #ifdef LOAD_EXTEND_OP
3071 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3072 && (GET_MODE_SIZE (GET_MODE (operand))
3073 <= UNITS_PER_WORD)
3074 && (GET_MODE_SIZE (operand_mode[i])
3075 > GET_MODE_SIZE (GET_MODE (operand)))
3076 && INTEGRAL_MODE_P (GET_MODE (operand))
3077 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3078 #endif
3080 #endif
3083 force_reload = 1;
3086 this_alternative[i] = NO_REGS;
3087 this_alternative_win[i] = 0;
3088 this_alternative_match_win[i] = 0;
3089 this_alternative_offmemok[i] = 0;
3090 this_alternative_earlyclobber[i] = 0;
3091 this_alternative_matches[i] = -1;
3093 /* An empty constraint or empty alternative
3094 allows anything which matched the pattern. */
3095 if (*p == 0 || *p == ',')
3096 win = 1, badop = 0;
3098 /* Scan this alternative's specs for this operand;
3099 set WIN if the operand fits any letter in this alternative.
3100 Otherwise, clear BADOP if this operand could
3101 fit some letter after reloads,
3102 or set WINREG if this operand could fit after reloads
3103 provided the constraint allows some registers. */
3106 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3108 case '\0':
3109 len = 0;
3110 break;
3111 case ',':
3112 c = '\0';
3113 break;
3115 case '=': case '+': case '*':
3116 break;
3118 case '%':
3119 /* We only support one commutative marker, the first
3120 one. We already set commutative above. */
3121 break;
3123 case '?':
3124 reject += 6;
3125 break;
3127 case '!':
3128 reject = 600;
3129 break;
3131 case '#':
3132 /* Ignore rest of this alternative as far as
3133 reloading is concerned. */
3134 do
3135 p++;
3136 while (*p && *p != ',');
3137 len = 0;
3138 break;
3140 case '0': case '1': case '2': case '3': case '4':
3141 case '5': case '6': case '7': case '8': case '9':
3142 m = strtoul (p, &end, 10);
3143 p = end;
3144 len = 0;
3146 this_alternative_matches[i] = m;
3147 /* We are supposed to match a previous operand.
3148 If we do, we win if that one did.
3149 If we do not, count both of the operands as losers.
3150 (This is too conservative, since most of the time
3151 only a single reload insn will be needed to make
3152 the two operands win. As a result, this alternative
3153 may be rejected when it is actually desirable.) */
3154 if ((swapped && (m != commutative || i != commutative + 1))
3155 /* If we are matching as if two operands were swapped,
3156 also pretend that operands_match had been computed
3157 with swapped.
3158 But if I is the second of those and C is the first,
3159 don't exchange them, because operands_match is valid
3160 only on one side of its diagonal. */
3161 ? (operands_match
3162 [(m == commutative || m == commutative + 1)
3163 ? 2 * commutative + 1 - m : m]
3164 [(i == commutative || i == commutative + 1)
3165 ? 2 * commutative + 1 - i : i])
3166 : operands_match[m][i])
3168 /* If we are matching a non-offsettable address where an
3169 offsettable address was expected, then we must reject
3170 this combination, because we can't reload it. */
3171 if (this_alternative_offmemok[m]
3172 && MEM_P (recog_data.operand[m])
3173 && this_alternative[m] == NO_REGS
3174 && ! this_alternative_win[m])
3175 bad = 1;
3177 did_match = this_alternative_win[m];
3179 else
3181 /* Operands don't match. */
3182 rtx value;
3183 int loc1, loc2;
3184 /* Retroactively mark the operand we had to match
3185 as a loser, if it wasn't already. */
3186 if (this_alternative_win[m])
3187 losers++;
3188 this_alternative_win[m] = 0;
3189 if (this_alternative[m] == NO_REGS)
3190 bad = 1;
3191 /* But count the pair only once in the total badness of
3192 this alternative, if the pair can be a dummy reload.
3193 The pointers in operand_loc are not swapped; swap
3194 them by hand if necessary. */
3195 if (swapped && i == commutative)
3196 loc1 = commutative + 1;
3197 else if (swapped && i == commutative + 1)
3198 loc1 = commutative;
3199 else
3200 loc1 = i;
3201 if (swapped && m == commutative)
3202 loc2 = commutative + 1;
3203 else if (swapped && m == commutative + 1)
3204 loc2 = commutative;
3205 else
3206 loc2 = m;
3207 value
3208 = find_dummy_reload (recog_data.operand[i],
3209 recog_data.operand[m],
3210 recog_data.operand_loc[loc1],
3211 recog_data.operand_loc[loc2],
3212 operand_mode[i], operand_mode[m],
3213 this_alternative[m], -1,
3214 this_alternative_earlyclobber[m]);
3216 if (value != 0)
3217 losers--;
3219 /* This can be fixed with reloads if the operand
3220 we are supposed to match can be fixed with reloads. */
3221 badop = 0;
3222 this_alternative[i] = this_alternative[m];
3224 /* If we have to reload this operand and some previous
3225 operand also had to match the same thing as this
3226 operand, we don't know how to do that. So reject this
3227 alternative. */
3228 if (! did_match || force_reload)
3229 for (j = 0; j < i; j++)
3230 if (this_alternative_matches[j]
3231 == this_alternative_matches[i])
3232 badop = 1;
3233 break;
3235 case 'p':
3236 /* All necessary reloads for an address_operand
3237 were handled in find_reloads_address. */
3238 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3239 SCRATCH);
3240 win = 1;
3241 badop = 0;
3242 break;
3244 case TARGET_MEM_CONSTRAINT:
3245 if (force_reload)
3246 break;
3247 if (MEM_P (operand)
3248 || (REG_P (operand)
3249 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3250 && reg_renumber[REGNO (operand)] < 0))
3251 win = 1;
3252 if (CONST_POOL_OK_P (operand_mode[i], operand))
3253 badop = 0;
3254 constmemok = 1;
3255 break;
3257 case '<':
3258 if (MEM_P (operand)
3259 && ! address_reloaded[i]
3260 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3261 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3262 win = 1;
3263 break;
3265 case '>':
3266 if (MEM_P (operand)
3267 && ! address_reloaded[i]
3268 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3269 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3270 win = 1;
3271 break;
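/* An address is "offsettable" if a small constant can be added to it
and the result is still a valid address; e.g. (mem (reg X)) is
normally offsettable, since (mem (plus (reg X) (const_int 4))) is
usually valid, while an autoincrement address such as
(mem (post_inc (reg X))) is not. The 'V' and 'o' cases below test
the two sides of this property. */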
3273 /* Memory operand whose address is not offsettable. */
3274 case 'V':
3275 if (force_reload)
3276 break;
3277 if (MEM_P (operand)
3278 && ! (ind_levels ? offsettable_memref_p (operand)
3279 : offsettable_nonstrict_memref_p (operand))
3280 /* Certain mem addresses will become offsettable
3281 after they themselves are reloaded. This is important;
3282 we don't want our own handling of unoffsettables
3283 to override the handling of reg_equiv_address. */
3284 && !(REG_P (XEXP (operand, 0))
3285 && (ind_levels == 0
3286 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3287 win = 1;
3288 break;
3290 /* Memory operand whose address is offsettable. */
3291 case 'o':
3292 if (force_reload)
3293 break;
3294 if ((MEM_P (operand)
3295 /* If IND_LEVELS, find_reloads_address won't reload a
3296 pseudo that didn't get a hard reg, so we have to
3297 reject that case. */
3298 && ((ind_levels ? offsettable_memref_p (operand)
3299 : offsettable_nonstrict_memref_p (operand))
3300 /* A reloaded address is offsettable because it is now
3301 just a simple register indirect. */
3302 || address_reloaded[i] == 1))
3303 || (REG_P (operand)
3304 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3305 && reg_renumber[REGNO (operand)] < 0
3306 /* If reg_equiv_address is nonzero, we will be
3307 loading it into a register; hence it will be
3308 offsettable, but we cannot say that reg_equiv_mem
3309 is offsettable without checking. */
3310 && ((reg_equiv_mem (REGNO (operand)) != 0
3311 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3312 || (reg_equiv_address (REGNO (operand)) != 0))))
3313 win = 1;
3314 if (CONST_POOL_OK_P (operand_mode[i], operand)
3315 || MEM_P (operand))
3316 badop = 0;
3317 constmemok = 1;
3318 offmemok = 1;
3319 break;
3321 case '&':
3322 /* Output operand that is stored before the need for the
3323 input operands (and their index registers) is over. */
3324 earlyclobber = 1, this_earlyclobber = 1;
3325 break;
3327 case 'E':
3328 case 'F':
3329 if (GET_CODE (operand) == CONST_DOUBLE
3330 || (GET_CODE (operand) == CONST_VECTOR
3331 && (GET_MODE_CLASS (GET_MODE (operand))
3332 == MODE_VECTOR_FLOAT)))
3333 win = 1;
3334 break;
3336 case 'G':
3337 case 'H':
3338 if (GET_CODE (operand) == CONST_DOUBLE
3339 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3340 win = 1;
3341 break;
3343 case 's':
3344 if (CONST_INT_P (operand)
3345 || (GET_CODE (operand) == CONST_DOUBLE
3346 && GET_MODE (operand) == VOIDmode))
3347 break;
3348 case 'i':
3349 if (CONSTANT_P (operand)
3350 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3351 win = 1;
3352 break;
3354 case 'n':
3355 if (CONST_INT_P (operand)
3356 || (GET_CODE (operand) == CONST_DOUBLE
3357 && GET_MODE (operand) == VOIDmode))
3358 win = 1;
3359 break;
3361 case 'I':
3362 case 'J':
3363 case 'K':
3364 case 'L':
3365 case 'M':
3366 case 'N':
3367 case 'O':
3368 case 'P':
3369 if (CONST_INT_P (operand)
3370 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3371 win = 1;
3372 break;
3374 case 'X':
3375 force_reload = 0;
3376 win = 1;
3377 break;
3379 case 'g':
3380 if (! force_reload
3381 /* A PLUS is never a valid operand, but reload can make
3382 it from a register when eliminating registers. */
3383 && GET_CODE (operand) != PLUS
3384 /* A SCRATCH is not a valid operand. */
3385 && GET_CODE (operand) != SCRATCH
3386 && (! CONSTANT_P (operand)
3387 || ! flag_pic
3388 || LEGITIMATE_PIC_OPERAND_P (operand))
3389 && (GENERAL_REGS == ALL_REGS
3390 || !REG_P (operand)
3391 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3392 && reg_renumber[REGNO (operand)] < 0)))
3393 win = 1;
3394 /* Drop through into 'r' case. */
3396 case 'r':
3397 this_alternative[i]
3398 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3399 goto reg;
3401 default:
3402 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3404 #ifdef EXTRA_CONSTRAINT_STR
3405 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3407 if (force_reload)
3408 break;
3409 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3410 win = 1;
3411 /* If the address was already reloaded,
3412 we win as well. */
3413 else if (MEM_P (operand)
3414 && address_reloaded[i] == 1)
3415 win = 1;
3416 /* Likewise if the address will be reloaded because
3417 reg_equiv_address is nonzero. For reg_equiv_mem
3418 we have to check. */
3419 else if (REG_P (operand)
3420 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3421 && reg_renumber[REGNO (operand)] < 0
3422 && ((reg_equiv_mem (REGNO (operand)) != 0
3423 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3424 || (reg_equiv_address (REGNO (operand)) != 0)))
3425 win = 1;
3427 /* If we didn't already win, we can reload
3428 constants via force_const_mem, and other
3429 MEMs by reloading the address like for 'o'. */
3430 if (CONST_POOL_OK_P (operand_mode[i], operand)
3431 || MEM_P (operand))
3432 badop = 0;
3433 constmemok = 1;
3434 offmemok = 1;
3435 break;
3437 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3439 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3440 win = 1;
3442 /* If we didn't already win, we can reload
3443 the address into a base register. */
3444 this_alternative[i] = base_reg_class (VOIDmode,
3445 ADDRESS,
3446 SCRATCH);
3447 badop = 0;
3448 break;
3451 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3452 win = 1;
3453 #endif
3454 break;
3457 this_alternative[i]
3458 = (reg_class_subunion
3459 [this_alternative[i]]
3460 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3461 reg:
3462 if (GET_MODE (operand) == BLKmode)
3463 break;
3464 winreg = 1;
3465 if (REG_P (operand)
3466 && reg_fits_class_p (operand, this_alternative[i],
3467 offset, GET_MODE (recog_data.operand[i])))
3468 win = 1;
3469 break;
3471 while ((p += len), c);
3473 constraints[i] = p;
3475 /* If this operand could be handled with a reg,
3476 and some reg is allowed, then this operand can be handled. */
3477 if (winreg && this_alternative[i] != NO_REGS
3478 && (win || !class_only_fixed_regs[this_alternative[i]]))
3479 badop = 0;
3481 /* Record which operands fit this alternative. */
3482 this_alternative_earlyclobber[i] = earlyclobber;
3483 if (win && ! force_reload)
3484 this_alternative_win[i] = 1;
3485 else if (did_match && ! force_reload)
3486 this_alternative_match_win[i] = 1;
3487 else
3489 int const_to_mem = 0;
3491 this_alternative_offmemok[i] = offmemok;
3492 losers++;
3493 if (badop)
3494 bad = 1;
3495 /* Alternative loses if it has no regs for a reg operand. */
3496 if (REG_P (operand)
3497 && this_alternative[i] == NO_REGS
3498 && this_alternative_matches[i] < 0)
3499 bad = 1;
3501 /* If this is a constant that is reloaded into the desired
3502 class by copying it to memory first, count that as another
3503 reload. This is consistent with other code and is
3504 required to avoid choosing another alternative when
3505 the constant is moved into memory by this function on
3506 an early reload pass. Note that the test here is
3507 precisely the same as in the code below that calls
3508 force_const_mem. */
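/* For example, a floating-point CONST_DOUBLE used where the target
accepts no immediate is typically placed in the constant pool and
then loaded from memory; when the alternative also wants a register,
that load from the pool is the extra reload counted here. */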
3509 if (CONST_POOL_OK_P (operand_mode[i], operand)
3510 && ((targetm.preferred_reload_class (operand,
3511 this_alternative[i])
3512 == NO_REGS)
3513 || no_input_reloads))
3515 const_to_mem = 1;
3516 if (this_alternative[i] != NO_REGS)
3517 losers++;
3520 /* Alternative loses if it requires a type of reload not
3521 permitted for this insn. We can always reload SCRATCH
3522 and objects with a REG_UNUSED note. */
3523 if (GET_CODE (operand) != SCRATCH
3524 && modified[i] != RELOAD_READ && no_output_reloads
3525 && ! find_reg_note (insn, REG_UNUSED, operand))
3526 bad = 1;
3527 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3528 && ! const_to_mem)
3529 bad = 1;
3531 /* If we can't reload this value at all, reject this
3532 alternative. Note that we could also lose due to
3533 LIMIT_RELOAD_CLASS, but we don't check that
3534 here. */
3536 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3538 if (targetm.preferred_reload_class (operand, this_alternative[i])
3539 == NO_REGS)
3540 reject = 600;
3542 if (operand_type[i] == RELOAD_FOR_OUTPUT
3543 && (targetm.preferred_output_reload_class (operand,
3544 this_alternative[i])
3545 == NO_REGS))
3546 reject = 600;
3549 /* We prefer to reload pseudos over reloading other things,
3550 since such reloads may be able to be eliminated later.
3551 If we are reloading a SCRATCH, we won't be generating any
3552 insns, just using a register, so it is also preferred.
3553 So bump REJECT in other cases. Don't do this in the
3554 case where we are forcing a constant into memory and
3555 it will then win, since we don't want a different
3556 alternative to match in that case. */
3557 if (! (REG_P (operand)
3558 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3559 && GET_CODE (operand) != SCRATCH
3560 && ! (const_to_mem && constmemok))
3561 reject += 2;
3563 /* Input reloads can be inherited more often than output
3564 reloads can be removed, so penalize output reloads. */
3565 if (operand_type[i] != RELOAD_FOR_INPUT
3566 && GET_CODE (operand) != SCRATCH)
3567 reject++;
3570 /* If this operand is a pseudo register that didn't get a hard
3571 reg and this alternative accepts some register, see if the
3572 class that we want is a subset of the preferred class for this
3573 register. If not, but it intersects that class, use the
3574 preferred class instead. If it does not intersect the preferred
3575 class, show that usage of this alternative should be discouraged;
3576 it will be discouraged more still if the register is `preferred
3577 or nothing'. We do this because it increases the chance of
3578 reusing our spill register in a later insn and avoiding a pair
3579 of memory stores and loads.
3581 Don't bother with this if this alternative will accept this
3582 operand.
3584 Don't do this for a multiword operand, since it is only a
3585 small win and has the risk of requiring more spill registers,
3586 which could cause a large loss.
3588 Don't do this if the preferred class has only one register
3589 because we might otherwise exhaust the class. */
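/* As a sketch of the idea: if this alternative would accept
GENERAL_REGS but the pseudo's preferred class is some smaller
subclass, reloading into that smaller class makes it more likely
that the reload register can be inherited by a later insn instead
of causing another store/load pair. */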
3591 if (! win && ! did_match
3592 && this_alternative[i] != NO_REGS
3593 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3594 && reg_class_size [(int) preferred_class[i]] > 0
3595 && ! small_register_class_p (preferred_class[i]))
3597 if (! reg_class_subset_p (this_alternative[i],
3598 preferred_class[i]))
3600 /* Since we don't have a way of forming the intersection,
3601 we just do something special if the preferred class
3602 is a subset of the class we have; that's the most
3603 common case anyway. */
3604 if (reg_class_subset_p (preferred_class[i],
3605 this_alternative[i]))
3606 this_alternative[i] = preferred_class[i];
3607 else
3608 reject += (2 + 2 * pref_or_nothing[i]);
3613 /* Now see if any output operands that are marked "earlyclobber"
3614 in this alternative conflict with any input operands
3615 or any memory addresses. */
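/* A typical conflict: an asm with an "=&r" output and an "r" input
that refer to overlapping values; the output register is written
before the input has finished being read, so one of the two must be
reloaded into a separate register. */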
3617 for (i = 0; i < noperands; i++)
3618 if (this_alternative_earlyclobber[i]
3619 && (this_alternative_win[i] || this_alternative_match_win[i]))
3621 struct decomposition early_data;
3623 early_data = decompose (recog_data.operand[i]);
3625 gcc_assert (modified[i] != RELOAD_READ);
3627 if (this_alternative[i] == NO_REGS)
3629 this_alternative_earlyclobber[i] = 0;
3630 gcc_assert (this_insn_is_asm);
3631 error_for_asm (this_insn,
3632 "%<&%> constraint used with no register class");
3635 for (j = 0; j < noperands; j++)
3636 /* Is this an input operand or a memory ref? */
3637 if ((MEM_P (recog_data.operand[j])
3638 || modified[j] != RELOAD_WRITE)
3639 && j != i
3640 /* Ignore things like match_operator operands. */
3641 && !recog_data.is_operator[j]
3642 /* Don't count an input operand that is constrained to match
3643 the early clobber operand. */
3644 && ! (this_alternative_matches[j] == i
3645 && rtx_equal_p (recog_data.operand[i],
3646 recog_data.operand[j]))
3647 /* Is it altered by storing the earlyclobber operand? */
3648 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3649 early_data))
3651 /* If the output is in a non-empty few-regs class,
3652 it's costly to reload it, so reload the input instead. */
3653 if (small_register_class_p (this_alternative[i])
3654 && (REG_P (recog_data.operand[j])
3655 || GET_CODE (recog_data.operand[j]) == SUBREG))
3657 losers++;
3658 this_alternative_win[j] = 0;
3659 this_alternative_match_win[j] = 0;
3661 else
3662 break;
3664 /* If an earlyclobber operand conflicts with something,
3665 it must be reloaded, so request this and count the cost. */
3666 if (j != noperands)
3668 losers++;
3669 this_alternative_win[i] = 0;
3670 this_alternative_match_win[j] = 0;
3671 for (j = 0; j < noperands; j++)
3672 if (this_alternative_matches[j] == i
3673 && this_alternative_match_win[j])
3675 this_alternative_win[j] = 0;
3676 this_alternative_match_win[j] = 0;
3677 losers++;
3682 /* If one alternative accepts all the operands with no reloads required,
3683 choose that alternative; don't consider the remaining ones. */
3684 if (losers == 0)
3686 /* Unswap these so that they are never swapped at `finish'. */
3687 if (commutative >= 0)
3689 recog_data.operand[commutative] = substed_operand[commutative];
3690 recog_data.operand[commutative + 1]
3691 = substed_operand[commutative + 1];
3693 for (i = 0; i < noperands; i++)
3695 goal_alternative_win[i] = this_alternative_win[i];
3696 goal_alternative_match_win[i] = this_alternative_match_win[i];
3697 goal_alternative[i] = this_alternative[i];
3698 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3699 goal_alternative_matches[i] = this_alternative_matches[i];
3700 goal_alternative_earlyclobber[i]
3701 = this_alternative_earlyclobber[i];
3703 goal_alternative_number = this_alternative_number;
3704 goal_alternative_swapped = swapped;
3705 goal_earlyclobber = this_earlyclobber;
3706 goto finish;
3709 /* REJECT, set by the ! and ? constraint characters and when a register
3710 would be reloaded into a non-preferred class, discourages the use of
3711 this alternative for a reload goal. REJECT is incremented by six
3712 for each ? and two for each non-preferred class. */
3713 losers = losers * 6 + reject;
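/* As a worked example of the scoring: with this weighting a single
'?' (reject += 6) costs the same as one additional operand needing
a reload, so an alternative with two losers and one '?' scores
2*6 + 6 = 18, the same as an alternative with three losers and
no '?'. */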
3715 /* If this alternative can be made to work by reloading,
3716 and it needs less reloading than the others checked so far,
3717 record it as the chosen goal for reloading. */
3718 if (! bad)
3720 if (best > losers)
3722 for (i = 0; i < noperands; i++)
3724 goal_alternative[i] = this_alternative[i];
3725 goal_alternative_win[i] = this_alternative_win[i];
3726 goal_alternative_match_win[i]
3727 = this_alternative_match_win[i];
3728 goal_alternative_offmemok[i]
3729 = this_alternative_offmemok[i];
3730 goal_alternative_matches[i] = this_alternative_matches[i];
3731 goal_alternative_earlyclobber[i]
3732 = this_alternative_earlyclobber[i];
3734 goal_alternative_swapped = swapped;
3735 best = losers;
3736 goal_alternative_number = this_alternative_number;
3737 goal_earlyclobber = this_earlyclobber;
3742 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3743 then we need to try each alternative twice,
3744 the second time matching those two operands
3745 as if we had exchanged them.
3746 To do this, really exchange them in operands.
3748 If we have just tried the alternatives the second time,
3749 return operands to normal and drop through. */
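/* For example, in an addition whose pattern allows a memory operand
only in the second position, an insn of the form
(set (reg 0) (plus (mem A) (reg 1))) can be made to match without
reloads by swapping the two commutative PLUS operands; both orders
are scored and the cheaper one is kept. */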
3751 if (commutative >= 0)
3753 swapped = !swapped;
3754 if (swapped)
3756 enum reg_class tclass;
3757 int t;
3759 recog_data.operand[commutative] = substed_operand[commutative + 1];
3760 recog_data.operand[commutative + 1] = substed_operand[commutative];
3761 /* Swap the duplicates too. */
3762 for (i = 0; i < recog_data.n_dups; i++)
3763 if (recog_data.dup_num[i] == commutative
3764 || recog_data.dup_num[i] == commutative + 1)
3765 *recog_data.dup_loc[i]
3766 = recog_data.operand[(int) recog_data.dup_num[i]];
3768 tclass = preferred_class[commutative];
3769 preferred_class[commutative] = preferred_class[commutative + 1];
3770 preferred_class[commutative + 1] = tclass;
3772 t = pref_or_nothing[commutative];
3773 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3774 pref_or_nothing[commutative + 1] = t;
3776 t = address_reloaded[commutative];
3777 address_reloaded[commutative] = address_reloaded[commutative + 1];
3778 address_reloaded[commutative + 1] = t;
3780 memcpy (constraints, recog_data.constraints,
3781 noperands * sizeof (const char *));
3782 goto try_swapped;
3784 else
3786 recog_data.operand[commutative] = substed_operand[commutative];
3787 recog_data.operand[commutative + 1]
3788 = substed_operand[commutative + 1];
3789 /* Unswap the duplicates too. */
3790 for (i = 0; i < recog_data.n_dups; i++)
3791 if (recog_data.dup_num[i] == commutative
3792 || recog_data.dup_num[i] == commutative + 1)
3793 *recog_data.dup_loc[i]
3794 = recog_data.operand[(int) recog_data.dup_num[i]];
3798 /* The operands don't meet the constraints.
3799 goal_alternative describes the alternative
3800 that we could reach by reloading the fewest operands.
3801 Reload so as to fit it. */
3803 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3805 /* No alternative works with reloads?? */
3806 if (insn_code_number >= 0)
3807 fatal_insn ("unable to generate reloads for:", insn);
3808 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3809 /* Avoid further trouble with this insn. */
3810 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3811 n_reloads = 0;
3812 return 0;
3815 /* Jump to `finish' from above if all operands are valid already.
3816 In that case, goal_alternative_win is all 1. */
3817 finish:
3819 /* Right now, for any pair of operands I and J that are required to match,
3820 with I < J,
3821 goal_alternative_matches[J] is I.
3822 Set up goal_alternative_matched as the inverse function:
3823 goal_alternative_matched[I] = J. */
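/* For instance, if operand 2 carried the constraint "0", then
goal_alternative_matches[2] == 0 and the loop below records the
inverse as goal_alternative_matched[0] == 2 (provided operand 2
itself still needs a reload). */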
3825 for (i = 0; i < noperands; i++)
3826 goal_alternative_matched[i] = -1;
3828 for (i = 0; i < noperands; i++)
3829 if (! goal_alternative_win[i]
3830 && goal_alternative_matches[i] >= 0)
3831 goal_alternative_matched[goal_alternative_matches[i]] = i;
3833 for (i = 0; i < noperands; i++)
3834 goal_alternative_win[i] |= goal_alternative_match_win[i];
3836 /* If the best alternative is with operands 1 and 2 swapped,
3837 consider them swapped before reporting the reloads. Update the
3838 operand numbers of any reloads already pushed. */
3840 if (goal_alternative_swapped)
3842 rtx tem;
3844 tem = substed_operand[commutative];
3845 substed_operand[commutative] = substed_operand[commutative + 1];
3846 substed_operand[commutative + 1] = tem;
3847 tem = recog_data.operand[commutative];
3848 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3849 recog_data.operand[commutative + 1] = tem;
3850 tem = *recog_data.operand_loc[commutative];
3851 *recog_data.operand_loc[commutative]
3852 = *recog_data.operand_loc[commutative + 1];
3853 *recog_data.operand_loc[commutative + 1] = tem;
3855 for (i = 0; i < n_reloads; i++)
3857 if (rld[i].opnum == commutative)
3858 rld[i].opnum = commutative + 1;
3859 else if (rld[i].opnum == commutative + 1)
3860 rld[i].opnum = commutative;
3864 for (i = 0; i < noperands; i++)
3866 operand_reloadnum[i] = -1;
3868 /* If this is an earlyclobber operand, we need to widen the scope.
3869 The reload must remain valid from the start of the insn being
3870 reloaded until after the operand is stored into its destination.
3871 We approximate this with RELOAD_OTHER even though we know that we
3872 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3874 One special case that is worth checking is when we have an
3875 output that is earlyclobber but isn't used past the insn (typically
3876 a SCRATCH). In this case, we need only have the reload live
3877 through the insn itself, but not for any of our input or output
3878 reloads.
3879 But we must not accidentally narrow the scope of an existing
3880 RELOAD_OTHER reload - leave these alone.
3882 In any case, anything needed to address this operand can remain
3883 however it was previously categorized. */
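/* E.g. an earlyclobber output that carries a REG_UNUSED note (the
SCRATCH situation described above) only needs its reload register
live through the insn itself, so RELOAD_FOR_INSN is enough; other
earlyclobber outputs get the conservative RELOAD_OTHER. */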
3885 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3886 operand_type[i]
3887 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3888 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3891 /* Any constants that aren't allowed and can't be reloaded
3892 into registers are here changed into memory references. */
3893 for (i = 0; i < noperands; i++)
3894 if (! goal_alternative_win[i])
3896 rtx op = recog_data.operand[i];
3897 rtx subreg = NULL_RTX;
3898 rtx plus = NULL_RTX;
3899 enum machine_mode mode = operand_mode[i];
3901 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3902 push_reload so we have to let them pass here. */
3903 if (GET_CODE (op) == SUBREG)
3905 subreg = op;
3906 op = SUBREG_REG (op);
3907 mode = GET_MODE (op);
3910 if (GET_CODE (op) == PLUS)
3912 plus = op;
3913 op = XEXP (op, 1);
3916 if (CONST_POOL_OK_P (mode, op)
3917 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3918 == NO_REGS)
3919 || no_input_reloads))
3921 int this_address_reloaded;
3922 rtx tem = force_const_mem (mode, op);
3924 /* If we stripped a SUBREG or a PLUS above, add it back. */
3925 if (plus != NULL_RTX)
3926 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3928 if (subreg != NULL_RTX)
3929 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3931 this_address_reloaded = 0;
3932 substed_operand[i] = recog_data.operand[i]
3933 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3934 0, insn, &this_address_reloaded);
3936 /* If the alternative accepts constant pool refs directly
3937 there will be no reload needed at all. */
3938 if (plus == NULL_RTX
3939 && subreg == NULL_RTX
3940 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3941 ? substed_operand[i]
3942 : NULL,
3943 recog_data.constraints[i],
3944 goal_alternative_number))
3945 goal_alternative_win[i] = 1;
3949 /* Record the values of the earlyclobber operands for the caller. */
3950 if (goal_earlyclobber)
3951 for (i = 0; i < noperands; i++)
3952 if (goal_alternative_earlyclobber[i])
3953 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3955 /* Now record reloads for all the operands that need them. */
3956 for (i = 0; i < noperands; i++)
3957 if (! goal_alternative_win[i])
3959 /* Operands that match previous ones have already been handled. */
3960 if (goal_alternative_matches[i] >= 0)
3962 /* Handle an operand with a nonoffsettable address
3963 appearing where an offsettable address will do
3964 by reloading the address into a base register.
3966 ??? We can also do this when the operand is a register and
3967 reg_equiv_mem is not offsettable, but this is a bit tricky,
3968 so we don't bother with it. It may not be worth doing. */
3969 else if (goal_alternative_matched[i] == -1
3970 && goal_alternative_offmemok[i]
3971 && MEM_P (recog_data.operand[i]))
3973 /* If the address to be reloaded is a VOIDmode constant,
3974 use the default address mode as mode of the reload register,
3975 as would have been done by find_reloads_address. */
3976 enum machine_mode address_mode;
3977 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3978 if (address_mode == VOIDmode)
3980 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3981 address_mode = targetm.addr_space.address_mode (as);
3984 operand_reloadnum[i]
3985 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3986 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3987 base_reg_class (VOIDmode, MEM, SCRATCH),
3988 address_mode,
3989 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3990 rld[operand_reloadnum[i]].inc
3991 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3993 /* If this operand is an output, we will have made any
3994 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3995 now we are treating part of the operand as an input, so
3996 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3998 if (modified[i] == RELOAD_WRITE)
4000 for (j = 0; j < n_reloads; j++)
4002 if (rld[j].opnum == i)
4004 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4005 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4006 else if (rld[j].when_needed
4007 == RELOAD_FOR_OUTADDR_ADDRESS)
4008 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4013 else if (goal_alternative_matched[i] == -1)
4015 operand_reloadnum[i]
4016 = push_reload ((modified[i] != RELOAD_WRITE
4017 ? recog_data.operand[i] : 0),
4018 (modified[i] != RELOAD_READ
4019 ? recog_data.operand[i] : 0),
4020 (modified[i] != RELOAD_WRITE
4021 ? recog_data.operand_loc[i] : 0),
4022 (modified[i] != RELOAD_READ
4023 ? recog_data.operand_loc[i] : 0),
4024 (enum reg_class) goal_alternative[i],
4025 (modified[i] == RELOAD_WRITE
4026 ? VOIDmode : operand_mode[i]),
4027 (modified[i] == RELOAD_READ
4028 ? VOIDmode : operand_mode[i]),
4029 (insn_code_number < 0 ? 0
4030 : insn_data[insn_code_number].operand[i].strict_low),
4031 0, i, operand_type[i]);
4033 /* In a matching pair of operands, one must be input only
4034 and the other must be output only.
4035 Pass the input operand as IN and the other as OUT. */
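/* For example, with a two-address pattern where operand 1 must match
operand 0, operand 1 is read and operand 0 is written; a single
RELOAD_OTHER reload then copies the input value in before the insn
and stores the result back out afterwards, using one reload
register for both roles. */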
4036 else if (modified[i] == RELOAD_READ
4037 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4039 operand_reloadnum[i]
4040 = push_reload (recog_data.operand[i],
4041 recog_data.operand[goal_alternative_matched[i]],
4042 recog_data.operand_loc[i],
4043 recog_data.operand_loc[goal_alternative_matched[i]],
4044 (enum reg_class) goal_alternative[i],
4045 operand_mode[i],
4046 operand_mode[goal_alternative_matched[i]],
4047 0, 0, i, RELOAD_OTHER);
4048 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4050 else if (modified[i] == RELOAD_WRITE
4051 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4053 operand_reloadnum[goal_alternative_matched[i]]
4054 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4055 recog_data.operand[i],
4056 recog_data.operand_loc[goal_alternative_matched[i]],
4057 recog_data.operand_loc[i],
4058 (enum reg_class) goal_alternative[i],
4059 operand_mode[goal_alternative_matched[i]],
4060 operand_mode[i],
4061 0, 0, i, RELOAD_OTHER);
4062 operand_reloadnum[i] = output_reloadnum;
4064 else
4066 gcc_assert (insn_code_number < 0);
4067 error_for_asm (insn, "inconsistent operand constraints "
4068 "in an %<asm%>");
4069 /* Avoid further trouble with this insn. */
4070 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4071 n_reloads = 0;
4072 return 0;
4075 else if (goal_alternative_matched[i] < 0
4076 && goal_alternative_matches[i] < 0
4077 && address_operand_reloaded[i] != 1
4078 && optimize)
4080 /* For each non-matching operand that's a MEM or a pseudo-register
4081 that didn't get a hard register, make an optional reload.
4082 This may get done even if the insn needs no reloads otherwise. */
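/* Roughly speaking, an optional reload is one that push_reload
records with its "optional" flag set: reload is free to perform it
if a suitable register turns out to be available (which may let the
insn use a register instead of memory, or let an output reload
delete the insn later), and to drop it otherwise. */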
4084 rtx operand = recog_data.operand[i];
4086 while (GET_CODE (operand) == SUBREG)
4087 operand = SUBREG_REG (operand);
4088 if ((MEM_P (operand)
4089 || (REG_P (operand)
4090 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4091 /* If this is only for an output, the optional reload would not
4092 actually cause us to use a register now, just note that
4093 something is stored here. */
4094 && (goal_alternative[i] != NO_REGS
4095 || modified[i] == RELOAD_WRITE)
4096 && ! no_input_reloads
4097 /* An optional output reload might allow INSN to be deleted later.
4098 We mustn't make in-out reloads on insns that are not permitted to
4099 have output reloads.
4100 If this is an asm, we can't delete it; we must not even call
4101 push_reload for an optional output reload in this case,
4102 because we can't be sure that the constraint allows a register,
4103 and push_reload verifies the constraints for asms. */
4104 && (modified[i] == RELOAD_READ
4105 || (! no_output_reloads && ! this_insn_is_asm)))
4106 operand_reloadnum[i]
4107 = push_reload ((modified[i] != RELOAD_WRITE
4108 ? recog_data.operand[i] : 0),
4109 (modified[i] != RELOAD_READ
4110 ? recog_data.operand[i] : 0),
4111 (modified[i] != RELOAD_WRITE
4112 ? recog_data.operand_loc[i] : 0),
4113 (modified[i] != RELOAD_READ
4114 ? recog_data.operand_loc[i] : 0),
4115 (enum reg_class) goal_alternative[i],
4116 (modified[i] == RELOAD_WRITE
4117 ? VOIDmode : operand_mode[i]),
4118 (modified[i] == RELOAD_READ
4119 ? VOIDmode : operand_mode[i]),
4120 (insn_code_number < 0 ? 0
4121 : insn_data[insn_code_number].operand[i].strict_low),
4122 1, i, operand_type[i]);
4123 /* If a memory reference remains (either as a MEM or a pseudo that
4124 did not get a hard register), yet we can't make an optional
4125 reload, check if this is actually a pseudo register reference;
4126 we then need to emit a USE and/or a CLOBBER so that reload
4127 inheritance will do the right thing. */
4128 else if (replace
4129 && (MEM_P (operand)
4130 || (REG_P (operand)
4131 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4132 && reg_renumber [REGNO (operand)] < 0)))
4134 operand = *recog_data.operand_loc[i];
4136 while (GET_CODE (operand) == SUBREG)
4137 operand = SUBREG_REG (operand);
4138 if (REG_P (operand))
4140 if (modified[i] != RELOAD_WRITE)
4141 /* We mark the USE with QImode so that we recognize
4142 it as one that can be safely deleted at the end
4143 of reload. */
4144 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4145 insn), QImode);
4146 if (modified[i] != RELOAD_READ)
4147 emit_insn_after (gen_clobber (operand), insn);
4151 else if (goal_alternative_matches[i] >= 0
4152 && goal_alternative_win[goal_alternative_matches[i]]
4153 && modified[i] == RELOAD_READ
4154 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4155 && ! no_input_reloads && ! no_output_reloads
4156 && optimize)
4158 /* Similarly, make an optional reload for a pair of matching
4159 objects that are in MEM or a pseudo that didn't get a hard reg. */
4161 rtx operand = recog_data.operand[i];
4163 while (GET_CODE (operand) == SUBREG)
4164 operand = SUBREG_REG (operand);
4165 if ((MEM_P (operand)
4166 || (REG_P (operand)
4167 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4168 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4169 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4170 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4171 recog_data.operand[i],
4172 recog_data.operand_loc[goal_alternative_matches[i]],
4173 recog_data.operand_loc[i],
4174 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4175 operand_mode[goal_alternative_matches[i]],
4176 operand_mode[i],
4177 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4180 /* Perform whatever substitutions on the operands we are supposed
4181 to make due to commutativity or replacement of registers
4182 with equivalent constants or memory slots. */
4184 for (i = 0; i < noperands; i++)
4186 /* We only do this on the last pass through reload, because it is
4187 possible for some data (like reg_equiv_address) to be changed during
4188 later passes. Moreover, we lose the opportunity to get a useful
4189 reload_{in,out}_reg when we do these replacements. */
4191 if (replace)
4193 rtx substitution = substed_operand[i];
4195 *recog_data.operand_loc[i] = substitution;
4197 /* If we're replacing an operand with a LABEL_REF, we need to
4198 make sure that there's a REG_LABEL_OPERAND note attached to
4199 this instruction. */
4200 if (GET_CODE (substitution) == LABEL_REF
4201 && !find_reg_note (insn, REG_LABEL_OPERAND,
4202 XEXP (substitution, 0))
4203 /* For a JUMP_P, if it was a branch target it must have
4204 already been recorded as such. */
4205 && (!JUMP_P (insn)
4206 || !label_is_jump_target_p (XEXP (substitution, 0),
4207 insn)))
4208 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4210 else
4211 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4214 /* If this insn pattern contains any MATCH_DUP's, make sure that
4215 they will be substituted if the operands they match are substituted.
4216 Also do now any substitutions we already did on the operands.
4218 Don't do this if we aren't making replacements because we might be
4219 propagating things allocated by frame pointer elimination into places
4220 it doesn't expect. */
4222 if (insn_code_number >= 0 && replace)
4223 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4225 int opno = recog_data.dup_num[i];
4226 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4227 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4230 #if 0
4231 /* This loses because reloading of prior insns can invalidate the equivalence
4232 (or at least find_equiv_reg isn't smart enough to find it any more),
4233 causing this insn to need more reload regs than it needed before.
4234 It may be too late to make the reload regs available.
4235 Now this optimization is done safely in choose_reload_regs. */
4237 /* For each reload of a reg into some other class of reg,
4238 search for an existing equivalent reg (same value now) in the right class.
4239 We can use it as long as we don't need to change its contents. */
4240 for (i = 0; i < n_reloads; i++)
4241 if (rld[i].reg_rtx == 0
4242 && rld[i].in != 0
4243 && REG_P (rld[i].in)
4244 && rld[i].out == 0)
4246 rld[i].reg_rtx
4247 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4248 static_reload_reg_p, 0, rld[i].inmode);
4249 /* Prevent generation of insn to load the value
4250 because the one we found already has the value. */
4251 if (rld[i].reg_rtx)
4252 rld[i].in = rld[i].reg_rtx;
4254 #endif
4256 /* If we detected an error and replaced the asm instruction by a USE,
4257 forget about the reloads. */
4258 if (GET_CODE (PATTERN (insn)) == USE
4259 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4260 n_reloads = 0;
4262 /* Perhaps an output reload can be combined with another
4263 to reduce needs by one. */
4264 if (!goal_earlyclobber)
4265 combine_reloads ();
4267 /* If we have a pair of reloads for parts of an address, they are reloading
4268 the same object, the operands themselves were not reloaded, and they
4269 are for two operands that are supposed to match, merge the reloads and
4270 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4272 for (i = 0; i < n_reloads; i++)
4274 int k;
4276 for (j = i + 1; j < n_reloads; j++)
4277 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4278 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4279 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4280 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4281 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4282 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4283 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4284 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4285 && rtx_equal_p (rld[i].in, rld[j].in)
4286 && (operand_reloadnum[rld[i].opnum] < 0
4287 || rld[operand_reloadnum[rld[i].opnum]].optional)
4288 && (operand_reloadnum[rld[j].opnum] < 0
4289 || rld[operand_reloadnum[rld[j].opnum]].optional)
4290 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4291 || (goal_alternative_matches[rld[j].opnum]
4292 == rld[i].opnum)))
4294 for (k = 0; k < n_replacements; k++)
4295 if (replacements[k].what == j)
4296 replacements[k].what = i;
4298 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4299 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4300 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4301 else
4302 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4303 rld[j].in = 0;
4307 /* Scan all the reloads and update their type.
4308 If a reload is for the address of an operand and we didn't reload
4309 that operand, change the type. Similarly, change the operand number
4310 of a reload when two operands match. If a reload is optional, treat it
4311 as though the operand isn't reloaded.
4313 ??? This latter case is somewhat odd because if we do the optional
4314 reload, it means the object is hanging around. Thus we need only
4315 do the address reload if the optional reload was NOT done.
4317 Change secondary reloads to be the address type of their operand, not
4318 the normal type.
4320 If an operand's reload is now RELOAD_OTHER, change any
4321 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4322 RELOAD_FOR_OTHER_ADDRESS. */
4324 for (i = 0; i < n_reloads; i++)
4326 if (rld[i].secondary_p
4327 && rld[i].when_needed == operand_type[rld[i].opnum])
4328 rld[i].when_needed = address_type[rld[i].opnum];
4330 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4331 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4332 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4333 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4334 && (operand_reloadnum[rld[i].opnum] < 0
4335 || rld[operand_reloadnum[rld[i].opnum]].optional))
4337 /* If we have a secondary reload to go along with this reload,
4338 change its type to RELOAD_FOR_OPADDR_ADDR. */
4340 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4341 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4342 && rld[i].secondary_in_reload != -1)
4344 int secondary_in_reload = rld[i].secondary_in_reload;
4346 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4348 /* If there's a tertiary reload we have to change it also. */
4349 if (secondary_in_reload > 0
4350 && rld[secondary_in_reload].secondary_in_reload != -1)
4351 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4352 = RELOAD_FOR_OPADDR_ADDR;
4355 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4356 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4357 && rld[i].secondary_out_reload != -1)
4359 int secondary_out_reload = rld[i].secondary_out_reload;
4361 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4363 /* If there's a tertiary reload we have to change it also. */
4364 if (secondary_out_reload
4365 && rld[secondary_out_reload].secondary_out_reload != -1)
4366 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4367 = RELOAD_FOR_OPADDR_ADDR;
4370 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4371 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4372 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4373 else
4374 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4377 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4378 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4379 && operand_reloadnum[rld[i].opnum] >= 0
4380 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4381 == RELOAD_OTHER))
4382 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4384 if (goal_alternative_matches[rld[i].opnum] >= 0)
4385 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4388 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4389 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4390 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4392 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4393 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4394 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4395 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4396 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4397 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4398 This is complicated by the fact that a single operand can have more
4399 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4400 choose_reload_regs without affecting code quality, and cases that
4401 actually fail are extremely rare, so it turns out to be better to fix
4402 the problem here by not generating cases that choose_reload_regs will
4403 fail for. */
4404 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4405 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4406 a single operand.
4407 We can reduce the register pressure by exploiting the fact that a
4408 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4409 does not conflict with any of them, if it is only used for the first of
4410 the RELOAD_FOR_X_ADDRESS reloads. */
4412 int first_op_addr_num = -2;
4413 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4414 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4415 int need_change = 0;
4416 /* We use first_op_addr_num and the contents of the above arrays
4417 first as flags - -2 means no instance encountered, -1 means exactly
4418 one instance encountered.
4419 If more than one instance has been encountered, we store the reload
4420 number of the first reload of the kind in question; reload numbers
4421 are known to be non-negative. */
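/* Worked example: scanning reloads from the highest number down, the
first RELOAD_FOR_INPUT_ADDRESS seen for operand N bumps
first_inpaddr_num[N] from -2 to -1; a second one bumps it to >= 0
and records that (lower) reload number, setting need_change. So a
value >= 0 afterwards means two or more reloads of this kind exist,
and this is the lowest-numbered one. */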
4422 for (i = 0; i < noperands; i++)
4423 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4424 for (i = n_reloads - 1; i >= 0; i--)
4426 switch (rld[i].when_needed)
4428 case RELOAD_FOR_OPERAND_ADDRESS:
4429 if (++first_op_addr_num >= 0)
4431 first_op_addr_num = i;
4432 need_change = 1;
4434 break;
4435 case RELOAD_FOR_INPUT_ADDRESS:
4436 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4438 first_inpaddr_num[rld[i].opnum] = i;
4439 need_change = 1;
4441 break;
4442 case RELOAD_FOR_OUTPUT_ADDRESS:
4443 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4445 first_outpaddr_num[rld[i].opnum] = i;
4446 need_change = 1;
4448 break;
4449 default:
4450 break;
4454 if (need_change)
4456 for (i = 0; i < n_reloads; i++)
4458 int first_num;
4459 enum reload_type type;
4461 switch (rld[i].when_needed)
4463 case RELOAD_FOR_OPADDR_ADDR:
4464 first_num = first_op_addr_num;
4465 type = RELOAD_FOR_OPERAND_ADDRESS;
4466 break;
4467 case RELOAD_FOR_INPADDR_ADDRESS:
4468 first_num = first_inpaddr_num[rld[i].opnum];
4469 type = RELOAD_FOR_INPUT_ADDRESS;
4470 break;
4471 case RELOAD_FOR_OUTADDR_ADDRESS:
4472 first_num = first_outpaddr_num[rld[i].opnum];
4473 type = RELOAD_FOR_OUTPUT_ADDRESS;
4474 break;
4475 default:
4476 continue;
4478 if (first_num < 0)
4479 continue;
4480 else if (i > first_num)
4481 rld[i].when_needed = type;
4482 else
4484 /* Check if the only TYPE reload that uses reload I is
4485 reload FIRST_NUM. */
4486 for (j = n_reloads - 1; j > first_num; j--)
4488 if (rld[j].when_needed == type
4489 && (rld[i].secondary_p
4490 ? rld[j].secondary_in_reload == i
4491 : reg_mentioned_p (rld[i].in, rld[j].in)))
4493 rld[i].when_needed = type;
4494 break;
4502 /* See if we have any reloads that are now allowed to be merged
4503 because we've changed when the reload is needed to
4504 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4505 check for the most common cases. */
4507 for (i = 0; i < n_reloads; i++)
4508 if (rld[i].in != 0 && rld[i].out == 0
4509 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4510 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4511 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4512 for (j = 0; j < n_reloads; j++)
4513 if (i != j && rld[j].in != 0 && rld[j].out == 0
4514 && rld[j].when_needed == rld[i].when_needed
4515 && MATCHES (rld[i].in, rld[j].in)
4516 && rld[i].rclass == rld[j].rclass
4517 && !rld[i].nocombine && !rld[j].nocombine
4518 && rld[i].reg_rtx == rld[j].reg_rtx)
4520 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4521 transfer_replacements (i, j);
4522 rld[j].in = 0;
4525 #ifdef HAVE_cc0
4526 /* If we made any reloads for addresses, see if they violate a
4527 "no input reloads" requirement for this insn. But loads that we
4528 do after the insn (such as for output addresses) are fine. */
4529 if (no_input_reloads)
4530 for (i = 0; i < n_reloads; i++)
4531 gcc_assert (rld[i].in == 0
4532 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4533 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4534 #endif
4536 /* Compute rld[i].mode and rld[i].nregs for each reload. */
4537 for (i = 0; i < n_reloads; i++)
4539 rld[i].mode
4540 = (rld[i].inmode == VOIDmode
4541 || (GET_MODE_SIZE (rld[i].outmode)
4542 > GET_MODE_SIZE (rld[i].inmode)))
4543 ? rld[i].outmode : rld[i].inmode;
4545 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4548 /* Special case a simple move with an input reload and a
4549 destination that is a hard reg; if the hard reg is OK, use it. */
4550 for (i = 0; i < n_reloads; i++)
4551 if (rld[i].when_needed == RELOAD_FOR_INPUT
4552 && GET_CODE (PATTERN (insn)) == SET
4553 && REG_P (SET_DEST (PATTERN (insn)))
4554 && (SET_SRC (PATTERN (insn)) == rld[i].in
4555 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4556 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4558 rtx dest = SET_DEST (PATTERN (insn));
4559 unsigned int regno = REGNO (dest);
4561 if (regno < FIRST_PSEUDO_REGISTER
4562 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4563 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4565 int nr = hard_regno_nregs[regno][rld[i].mode];
4566 int ok = 1, nri;
4568 for (nri = 1; nri < nr; nri ++)
4569 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4570 ok = 0;
4572 if (ok)
4573 rld[i].reg_rtx = dest;
4577 return retval;
4580 /* Return true if alternative number ALTNUM in constraint-string
4581 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4582 MEM gives the reference if it didn't need any reloads, otherwise it
4583 is null. */
4585 static bool
4586 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4587 const char *constraint, int altnum)
4589 int c;
4591 /* Skip alternatives before the one requested. */
4592 while (altnum > 0)
4594 while (*constraint++ != ',')
4596 altnum--;
4598 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4599 If one of them is present, this alternative accepts the result of
4600 passing a constant-pool reference through find_reloads_toplev.
4602 The same is true of extra memory constraints if the address
4603 was reloaded into a register. However, the target may elect
4604 to disallow the original constant address, forcing it to be
4605 reloaded into a register instead. */
4606 for (; (c = *constraint) && c != ',' && c != '#';
4607 constraint += CONSTRAINT_LEN (c, constraint))
4609 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4610 return true;
4611 #ifdef EXTRA_CONSTRAINT_STR
4612 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4613 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4614 return true;
4615 #endif
4617 return false;
4620 /* Scan X for memory references and scan the addresses for reloading.
4621 Also checks for references to "constant" regs that we want to eliminate
4622 and replaces them with the values they stand for.
4623 We may alter X destructively if it contains a reference to such.
4624 If X is just a constant reg, we return the equivalent value
4625 instead of X.
4627 IND_LEVELS says how many levels of indirect addressing this machine
4628 supports.
4630 OPNUM and TYPE identify the purpose of the reload.
4632 IS_SET_DEST is true if X is the destination of a SET, which is not
4633 appropriate to be replaced by a constant.
4635 INSN, if nonzero, is the insn in which we do the reload. It is used
4636 to determine if we may generate output reloads, and where to put USEs
4637 for pseudos that we have to replace with stack slots.
4639 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4640 result of find_reloads_address. */
4642 static rtx
4643 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4644 int ind_levels, int is_set_dest, rtx insn,
4645 int *address_reloaded)
4647 RTX_CODE code = GET_CODE (x);
4649 const char *fmt = GET_RTX_FORMAT (code);
4650 int i;
4651 int copied;
4653 if (code == REG)
4655 /* This code is duplicated for speed in find_reloads. */
4656 int regno = REGNO (x);
4657 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4658 x = reg_equiv_constant (regno);
4659 #if 0
4660 /* This creates (subreg (mem...)) which would cause an unnecessary
4661 reload of the mem. */
4662 else if (reg_equiv_mem (regno) != 0)
4663 x = reg_equiv_mem (regno);
4664 #endif
4665 else if (reg_equiv_memory_loc (regno)
4666 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4668 rtx mem = make_memloc (x, regno);
4669 if (reg_equiv_address (regno)
4670 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4672 /* If this is not a toplevel operand, find_reloads doesn't see
4673 this substitution. We have to emit a USE of the pseudo so
4674 that delete_output_reload can see it. */
4675 if (replace_reloads && recog_data.operand[opnum] != x)
4676 /* We mark the USE with QImode so that we recognize it
4677 as one that can be safely deleted at the end of
4678 reload. */
4679 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4680 QImode);
4681 x = mem;
4682 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4683 opnum, type, ind_levels, insn);
4684 if (!rtx_equal_p (x, mem))
4685 push_reg_equiv_alt_mem (regno, x);
4686 if (address_reloaded)
4687 *address_reloaded = i;
4690 return x;
4692 if (code == MEM)
4694 rtx tem = x;
4696 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4697 opnum, type, ind_levels, insn);
4698 if (address_reloaded)
4699 *address_reloaded = i;
4701 return tem;
4704 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4706 /* Check for SUBREG containing a REG that's equivalent to a
4707 constant. If the constant has a known value, truncate it
4708 right now. Similarly if we are extracting a single-word of a
4709 multi-word constant. If the constant is symbolic, allow it
4710 to be substituted normally. push_reload will strip the
4711 subreg later. The constant must not be VOIDmode, because we
4712 will lose the mode of the register (this should never happen
4713 because one of the cases above should handle it). */
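/* For example, if (reg:DI P) is equivalent to a known CONST_INT and
the insn uses (subreg:SI (reg:DI P) 0), simplify_gen_subreg below
folds the subreg to the corresponding SImode constant directly; if
the folded constant is not legitimate on the target, it is then
placed in the constant pool and its address reloaded. */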
4715 int regno = REGNO (SUBREG_REG (x));
4716 rtx tem;
4718 if (regno >= FIRST_PSEUDO_REGISTER
4719 && reg_renumber[regno] < 0
4720 && reg_equiv_constant (regno) != 0)
4722 tem =
4723 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4724 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4725 gcc_assert (tem);
4726 if (CONSTANT_P (tem)
4727 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4729 tem = force_const_mem (GET_MODE (x), tem);
4730 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4731 &XEXP (tem, 0), opnum, type,
4732 ind_levels, insn);
4733 if (address_reloaded)
4734 *address_reloaded = i;
4736 return tem;
4739 /* If the subreg contains a reg that will be converted to a mem,
4740 convert the subreg to a narrower memref now.
4741 Otherwise, we would get (subreg (mem ...) ...),
4742 which would force reload of the mem.
4744 We also need to do this if there is an equivalent MEM that is
4745 not offsettable. In that case, alter_subreg would produce an
4746 invalid address on big-endian machines.
4748 For machines that extend byte loads, we must not reload using
4749 a wider mode if we have a paradoxical SUBREG. find_reloads will
4750 force a reload in that case. So we should not do anything here. */
4752 if (regno >= FIRST_PSEUDO_REGISTER
4753 #ifdef LOAD_EXTEND_OP
4754 && !paradoxical_subreg_p (x)
4755 #endif
4756 && (reg_equiv_address (regno) != 0
4757 || (reg_equiv_mem (regno) != 0
4758 && (! strict_memory_address_addr_space_p
4759 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4760 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4761 || ! offsettable_memref_p (reg_equiv_mem (regno))
4762 || num_not_at_initial_offset))))
4763 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4764 insn, address_reloaded);
4767 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4769 if (fmt[i] == 'e')
4771 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4772 ind_levels, is_set_dest, insn,
4773 address_reloaded);
4774 /* If we have replaced a reg with its equivalent memory loc -
4775 that can still be handled here e.g. if it's in a paradoxical
4776 subreg - we must make the change in a copy, rather than using
4777 a destructive change. This way, find_reloads can still elect
4778 not to do the change. */
4779 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4781 x = shallow_copy_rtx (x);
4782 copied = 1;
4784 XEXP (x, i) = new_part;
4787 return x;
4790 /* Return a mem ref for the memory equivalent of reg REGNO.
4791 This mem ref is not shared with anything. */
4793 static rtx
4794 make_memloc (rtx ad, int regno)
4796 /* We must rerun eliminate_regs, in case the elimination
4797 offsets have changed. */
4798 rtx tem
4799 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4802 /* If TEM might contain a pseudo, we must copy it to avoid
4803 modifying it when we do the substitution for the reload. */
4804 if (rtx_varies_p (tem, 0))
4805 tem = copy_rtx (tem);
4807 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4808 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4810 /* Copy the result if it's still the same as the equivalence, to avoid
4811 modifying it when we do the substitution for the reload. */
4812 if (tem == reg_equiv_memory_loc (regno))
4813 tem = copy_rtx (tem);
4814 return tem;
4817 /* Returns true if AD could be turned into a valid memory reference
4818 to mode MODE in address space AS by reloading the part pointed to
4819 by PART into a register. */
4821 static int
4822 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4823 addr_space_t as, rtx *part)
4825 int retv;
4826 rtx tem = *part;
4827 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4829 *part = reg;
4830 retv = memory_address_addr_space_p (mode, ad, as);
4831 *part = tem;
4833 return retv;
4836 /* Record all reloads needed for handling memory address AD
4837 which appears in *LOC in a memory reference to mode MODE
4838 which itself is found in location *MEMREFLOC.
4839 Note that we take shortcuts assuming that no multi-reg machine mode
4840 occurs as part of an address.
4842 OPNUM and TYPE specify the purpose of this reload.
4844 IND_LEVELS says how many levels of indirect addressing this machine
4845 supports.
4847 INSN, if nonzero, is the insn in which we do the reload. It is used
4848 to determine if we may generate output reloads, and where to put USEs
4849 for pseudos that we have to replace with stack slots.
4851 Value is one if this address is reloaded or replaced as a whole; it is
4852 zero if the top level of this address was not reloaded or replaced, and
4853 it is -1 if it may or may not have been reloaded or replaced.
4855 Note that there is no verification that the address will be valid after
4856 this routine does its work. Instead, we rely on the fact that the address
4857 was valid when reload started. So we need only undo things that reload
4858 could have broken. These are wrong register types, pseudos not allocated
4859 to a hard register, and frame pointer elimination. */
4861 static int
4862 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4863 rtx *loc, int opnum, enum reload_type type,
4864 int ind_levels, rtx insn)
4866 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4867 : ADDR_SPACE_GENERIC;
4868 int regno;
4869 int removed_and = 0;
4870 int op_index;
4871 rtx tem;
4873 /* If the address is a register, see if it is a legitimate address and
4874 reload if not. We first handle the cases where we need not reload
4875 or where we must reload in a non-standard way. */
4877 if (REG_P (ad))
4879 regno = REGNO (ad);
4881 if (reg_equiv_constant (regno) != 0)
4883 find_reloads_address_part (reg_equiv_constant (regno), loc,
4884 base_reg_class (mode, MEM, SCRATCH),
4885 GET_MODE (ad), opnum, type, ind_levels);
4886 return 1;
4889 tem = reg_equiv_memory_loc (regno);
4890 if (tem != 0)
4892 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4894 tem = make_memloc (ad, regno);
4895 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4896 XEXP (tem, 0),
4897 MEM_ADDR_SPACE (tem)))
4899 rtx orig = tem;
4901 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4902 &XEXP (tem, 0), opnum,
4903 ADDR_TYPE (type), ind_levels, insn);
4904 if (!rtx_equal_p (tem, orig))
4905 push_reg_equiv_alt_mem (regno, tem);
4907 /* We can avoid a reload if the register's equivalent memory
4908 expression is valid as an indirect memory address.
4909 But not all addresses are valid in a mem used as an indirect
4910 address: only reg or reg+constant. */
4912 if (ind_levels > 0
4913 && strict_memory_address_addr_space_p (mode, tem, as)
4914 && (REG_P (XEXP (tem, 0))
4915 || (GET_CODE (XEXP (tem, 0)) == PLUS
4916 && REG_P (XEXP (XEXP (tem, 0), 0))
4917 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4919 /* TEM is not the same as what we'll be replacing the
4920 pseudo with after reload, put a USE in front of INSN
4921 in the final reload pass. */
4922 if (replace_reloads
4923 && num_not_at_initial_offset
4924 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4926 *loc = tem;
4927 /* We mark the USE with QImode so that we
4928 recognize it as one that can be safely
4929 deleted at the end of reload. */
4930 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4931 insn), QImode);
4933 /* This doesn't really count as replacing the address
4934 as a whole, since it is still a memory access. */
4936 return 0;
4938 ad = tem;
4942 /* The only remaining case where we can avoid a reload is if this is a
4943 hard register that is valid as a base register and which is not the
4944 subject of a CLOBBER in this insn. */
4946 else if (regno < FIRST_PSEUDO_REGISTER
4947 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4948 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4949 return 0;
4951 /* If we do not have one of the cases above, we must do the reload. */
4952 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4953 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4954 return 1;
4957 if (strict_memory_address_addr_space_p (mode, ad, as))
4959 /* The address appears valid, so reloads are not needed.
4960 But the address may contain an eliminable register.
4961 This can happen because a machine with indirect addressing
4962 may consider a pseudo register by itself a valid address even when
4963 it has failed to get a hard reg.
4964 So do a tree-walk to find and eliminate all such regs. */
4966 /* But first quickly dispose of a common case. */
4967 if (GET_CODE (ad) == PLUS
4968 && CONST_INT_P (XEXP (ad, 1))
4969 && REG_P (XEXP (ad, 0))
4970 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4971 return 0;
4973 subst_reg_equivs_changed = 0;
4974 *loc = subst_reg_equivs (ad, insn);
4976 if (! subst_reg_equivs_changed)
4977 return 0;
4979 /* Check result for validity after substitution. */
4980 if (strict_memory_address_addr_space_p (mode, ad, as))
4981 return 0;
4984 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4985 do
4987 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4989 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4990 ind_levels, win);
4992 break;
4993 win:
4994 *memrefloc = copy_rtx (*memrefloc);
4995 XEXP (*memrefloc, 0) = ad;
4996 move_replacements (&ad, &XEXP (*memrefloc, 0));
4997 return -1;
4999 while (0);
5000 #endif
5002 /* The address is not valid. We have to figure out why. First see if
5003 we have an outer AND and remove it if so. Then analyze what's inside. */
5005 if (GET_CODE (ad) == AND)
5007 removed_and = 1;
5008 loc = &XEXP (ad, 0);
5009 ad = *loc;
5012 /* One possibility for why the address is invalid is that it is itself
5013 a MEM. This can happen when the frame pointer is being eliminated, a
5014 pseudo is not allocated to a hard register, and the offset between the
5015 frame and stack pointers is not its initial value. In that case the
5016 pseudo will have been replaced by a MEM referring to the
5017 stack pointer. */
5018 if (MEM_P (ad))
5020 /* First ensure that the address in this MEM is valid. Then, unless
5021 indirect addresses are valid, reload the MEM into a register. */
5022 tem = ad;
5023 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5024 opnum, ADDR_TYPE (type),
5025 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5027 /* If tem was changed, then we must create a new memory reference to
5028 hold it and store it back into memrefloc. */
5029 if (tem != ad && memrefloc)
5031 *memrefloc = copy_rtx (*memrefloc);
5032 copy_replacements (tem, XEXP (*memrefloc, 0));
5033 loc = &XEXP (*memrefloc, 0);
5034 if (removed_and)
5035 loc = &XEXP (*loc, 0);
5038 /* Check cases similar to those for indirect addresses above, except
5039 that here we can allow pseudos and a MEM, since they should have been
5040 taken care of above. */
5042 if (ind_levels == 0
5043 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5044 || MEM_P (XEXP (tem, 0))
5045 || ! (REG_P (XEXP (tem, 0))
5046 || (GET_CODE (XEXP (tem, 0)) == PLUS
5047 && REG_P (XEXP (XEXP (tem, 0), 0))
5048 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5050 /* Must use TEM here, not AD, since it is the one that will
5051 have any subexpressions reloaded, if needed. */
5052 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5053 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5054 VOIDmode, 0,
5055 0, opnum, type);
5056 return ! removed_and;
5058 else
5059 return 0;
5062 /* If we have address of a stack slot but it's not valid because the
5063 displacement is too large, compute the sum in a register.
5064 Handle all base registers here, not just fp/ap/sp, because on some
5065 targets (namely SH) we can also get too large displacements from
5066 big-endian corrections. */
5067 else if (GET_CODE (ad) == PLUS
5068 && REG_P (XEXP (ad, 0))
5069 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5070 && CONST_INT_P (XEXP (ad, 1))
5071 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5072 CONST_INT)
5073 /* Similarly, if we were to reload the base register and the
5074 mem+offset address is still invalid, then we want to reload
5075 the whole address, not just the base register. */
5076 || ! maybe_memory_address_addr_space_p
5077 (mode, ad, as, &(XEXP (ad, 0)))))
5080 /* Unshare the MEM rtx so we can safely alter it. */
5081 if (memrefloc)
5083 *memrefloc = copy_rtx (*memrefloc);
5084 loc = &XEXP (*memrefloc, 0);
5085 if (removed_and)
5086 loc = &XEXP (*loc, 0);
5089 if (double_reg_address_ok
5090 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode,
5091 PLUS, CONST_INT))
5093 /* Unshare the sum as well. */
5094 *loc = ad = copy_rtx (ad);
5096 /* Reload the displacement into an index reg.
5097 We assume the frame pointer or arg pointer is a base reg. */
5098 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5099 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5100 type, ind_levels);
5101 return 0;
5103 else
5105 /* If the sum of two regs is not necessarily valid,
5106 reload the sum into a base reg.
5107 That will at least work. */
5108 find_reloads_address_part (ad, loc,
5109 base_reg_class (mode, MEM, SCRATCH),
5110 GET_MODE (ad), opnum, type, ind_levels);
5112 return ! removed_and;
5115 /* If we have an indexed stack slot, there are three possible reasons why
5116 it might be invalid: The index might need to be reloaded, the address
5117 might have been made by frame pointer elimination and hence have a
5118 constant out of range, or both reasons might apply.
5120 We can easily check for an index needing reload, but even if that is the
5121 case, we might also have an invalid constant. To avoid making the
5122 conservative assumption and requiring two reloads, we see if this address
5123 is valid when not interpreted strictly. If it is, the only problem is
5124 that the index needs a reload and find_reloads_address_1 will take care
5125 of it.
5127 Handle all base registers here, not just fp/ap/sp, because on some
5128 targets (namely SPARC) we can also get invalid addresses from preventive
5129 subreg big-endian corrections made by find_reloads_toplev. We
5130 can also get expressions involving LO_SUM (rather than PLUS) from
5131 find_reloads_subreg_address.
5133 If we decide to do something, it must be that `double_reg_address_ok'
5134 is true. We generate a reload of the base register + constant and
5135 rework the sum so that the reload register will be added to the index.
5136 This is safe because we know the address isn't shared.
5138 We check for the base register as both the first and second operand of
5139 the innermost PLUS and/or LO_SUM. */
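/* An added worked example of the rework described above: for
   (plus (plus (reg fp) (reg idx)) (const_int 400)), the constant is
   folded into the base, giving
   (plus (plus (reg fp) (const_int 400)) (reg idx)); the inner sum is
   then reloaded into a base register, so the reload register ends up
   added to the index.  */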
5141 for (op_index = 0; op_index < 2; ++op_index)
5143 rtx operand, addend;
5144 enum rtx_code inner_code;
5146 if (GET_CODE (ad) != PLUS)
5147 continue;
5149 inner_code = GET_CODE (XEXP (ad, 0));
5150 if (!(GET_CODE (ad) == PLUS
5151 && CONST_INT_P (XEXP (ad, 1))
5152 && (inner_code == PLUS || inner_code == LO_SUM)))
5153 continue;
5155 operand = XEXP (XEXP (ad, 0), op_index);
5156 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5157 continue;
5159 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5161 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5162 GET_CODE (addend))
5163 || operand == frame_pointer_rtx
5164 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5165 || operand == hard_frame_pointer_rtx
5166 #endif
5167 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5168 || operand == arg_pointer_rtx
5169 #endif
5170 || operand == stack_pointer_rtx)
5171 && ! maybe_memory_address_addr_space_p
5172 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5174 rtx offset_reg;
5175 enum reg_class cls;
5177 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5179 /* Form the adjusted address. */
5180 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5181 ad = gen_rtx_PLUS (GET_MODE (ad),
5182 op_index == 0 ? offset_reg : addend,
5183 op_index == 0 ? addend : offset_reg);
5184 else
5185 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5186 op_index == 0 ? offset_reg : addend,
5187 op_index == 0 ? addend : offset_reg);
5188 *loc = ad;
5190 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5191 find_reloads_address_part (XEXP (ad, op_index),
5192 &XEXP (ad, op_index), cls,
5193 GET_MODE (ad), opnum, type, ind_levels);
5194 find_reloads_address_1 (mode,
5195 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5196 GET_CODE (XEXP (ad, op_index)),
5197 &XEXP (ad, 1 - op_index), opnum,
5198 type, 0, insn);
5200 return 0;
5204 /* See if address becomes valid when an eliminable register
5205 in a sum is replaced. */
5207 tem = ad;
5208 if (GET_CODE (ad) == PLUS)
5209 tem = subst_indexed_address (ad);
5210 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5212 /* Ok, we win that way. Replace any additional eliminable
5213 registers. */
5215 subst_reg_equivs_changed = 0;
5216 tem = subst_reg_equivs (tem, insn);
5218 /* Make sure that didn't make the address invalid again. */
5220 if (! subst_reg_equivs_changed
5221 || strict_memory_address_addr_space_p (mode, tem, as))
5223 *loc = tem;
5224 return 0;
5228 /* If constants aren't valid addresses, reload the constant address
5229 into a register. */
5230 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5232 enum machine_mode address_mode = GET_MODE (ad);
5233 if (address_mode == VOIDmode)
5234 address_mode = targetm.addr_space.address_mode (as);
5236 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5237 Unshare it so we can safely alter it. */
5238 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5239 && CONSTANT_POOL_ADDRESS_P (ad))
5241 *memrefloc = copy_rtx (*memrefloc);
5242 loc = &XEXP (*memrefloc, 0);
5243 if (removed_and)
5244 loc = &XEXP (*loc, 0);
5247 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5248 address_mode, opnum, type, ind_levels);
5249 return ! removed_and;
5252 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5253 ind_levels, insn);
5256 /* Find all pseudo regs appearing in AD
5257 that are eliminable in favor of equivalent values
5258 and do not have hard regs; replace them by their equivalents.
5259 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5260 front of it for pseudos that we have to replace with stack slots. */
5262 static rtx
5263 subst_reg_equivs (rtx ad, rtx insn)
5265 RTX_CODE code = GET_CODE (ad);
5266 int i;
5267 const char *fmt;
5269 switch (code)
5271 case HIGH:
5272 case CONST_INT:
5273 case CONST:
5274 case CONST_DOUBLE:
5275 case CONST_FIXED:
5276 case CONST_VECTOR:
5277 case SYMBOL_REF:
5278 case LABEL_REF:
5279 case PC:
5280 case CC0:
5281 return ad;
5283 case REG:
5285 int regno = REGNO (ad);
5287 if (reg_equiv_constant (regno) != 0)
5289 subst_reg_equivs_changed = 1;
5290 return reg_equiv_constant (regno);
5292 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5294 rtx mem = make_memloc (ad, regno);
5295 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5297 subst_reg_equivs_changed = 1;
5298 /* We mark the USE with QImode so that we recognize it
5299 as one that can be safely deleted at the end of
5300 reload. */
5301 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5302 QImode);
5303 return mem;
5307 return ad;
5309 case PLUS:
5310 /* Quickly dispose of a common case. */
5311 if (XEXP (ad, 0) == frame_pointer_rtx
5312 && CONST_INT_P (XEXP (ad, 1)))
5313 return ad;
5314 break;
5316 default:
5317 break;
5320 fmt = GET_RTX_FORMAT (code);
5321 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5322 if (fmt[i] == 'e')
5323 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5324 return ad;
5327 /* Compute the sum of X and Y, making canonicalizations assumed in an
5328 address, namely: sum constant integers, surround the sum of two
5329 constants with a CONST, put the constant as the second operand, and
5330 group the constant on the outermost sum.
5332 This routine assumes both inputs are already in canonical form. */
5334 rtx
5335 form_sum (enum machine_mode mode, rtx x, rtx y)
5337 rtx tem;
5339 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5340 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5342 if (CONST_INT_P (x))
5343 return plus_constant (y, INTVAL (x));
5344 else if (CONST_INT_P (y))
5345 return plus_constant (x, INTVAL (y));
5346 else if (CONSTANT_P (x))
5347 tem = x, x = y, y = tem;
5349 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5350 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5352 /* Note that if the operands of Y are specified in the opposite
5353 order in the recursive calls below, infinite recursion will occur. */
5354 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5355 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5357 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5358 constant will have been placed second. */
5359 if (CONSTANT_P (x) && CONSTANT_P (y))
5361 if (GET_CODE (x) == CONST)
5362 x = XEXP (x, 0);
5363 if (GET_CODE (y) == CONST)
5364 y = XEXP (y, 0);
5366 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5369 return gen_rtx_PLUS (mode, x, y);
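#if 0
/* Illustrative sketch (an added example, not part of the original source):
   form_sum folds constants and keeps them outermost, so BASE + 8 + (-4)
   canonicalizes to (plus BASE (const_int 4)).  BASE is assumed to be a
   Pmode register; the function name is hypothetical.  */
static rtx
example_form_sum_use (rtx base)
{
  rtx tem = form_sum (Pmode, base, GEN_INT (8));   /* (plus base 8)  */
  return form_sum (Pmode, tem, GEN_INT (-4));      /* (plus base 4)  */
}
#endif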
5372 /* If ADDR is a sum containing a pseudo register that should be
5373 replaced with a constant (from reg_equiv_constant),
5374 return the result of doing so, and also apply the associative
5375 law so that the result is more likely to be a valid address.
5376 (But it is not guaranteed to be one.)
5378 Note that at most one register is replaced, even if more are
5379 replaceable. Also, we try to put the result into a canonical form
5380 so it is more likely to be a valid address.
5382 In all other cases, return ADDR. */
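/* An added example, not in the original sources: if pseudo (reg 70) did
   not get a hard register and is equivalent to (const_int 16), then
   (plus (reg 70) (reg 66)) becomes (plus (reg 66) (const_int 16)) after
   substitution and canonicalization.  */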
5384 static rtx
5385 subst_indexed_address (rtx addr)
5387 rtx op0 = 0, op1 = 0, op2 = 0;
5388 rtx tem;
5389 int regno;
5391 if (GET_CODE (addr) == PLUS)
5393 /* Try to find a register to replace. */
5394 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5395 if (REG_P (op0)
5396 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5397 && reg_renumber[regno] < 0
5398 && reg_equiv_constant (regno) != 0)
5399 op0 = reg_equiv_constant (regno);
5400 else if (REG_P (op1)
5401 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5402 && reg_renumber[regno] < 0
5403 && reg_equiv_constant (regno) != 0)
5404 op1 = reg_equiv_constant (regno);
5405 else if (GET_CODE (op0) == PLUS
5406 && (tem = subst_indexed_address (op0)) != op0)
5407 op0 = tem;
5408 else if (GET_CODE (op1) == PLUS
5409 && (tem = subst_indexed_address (op1)) != op1)
5410 op1 = tem;
5411 else
5412 return addr;
5414 /* Pick out up to three things to add. */
5415 if (GET_CODE (op1) == PLUS)
5416 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5417 else if (GET_CODE (op0) == PLUS)
5418 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5420 /* Compute the sum. */
5421 if (op2 != 0)
5422 op1 = form_sum (GET_MODE (addr), op1, op2);
5423 if (op1 != 0)
5424 op0 = form_sum (GET_MODE (addr), op0, op1);
5426 return op0;
5428 return addr;
5431 /* Update the REG_INC notes for an insn. It updates all REG_INC
5432 notes for the instruction which refer to REGNO so that they
5433 refer to the reload register instead.
5435 INSN is the insn for which any REG_INC notes need updating.
5437 REGNO is the register number which has been reloaded.
5439 RELOADNUM is the reload number. */
5441 static void
5442 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5443 int reloadnum ATTRIBUTE_UNUSED)
5445 #ifdef AUTO_INC_DEC
5446 rtx link;
5448 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5449 if (REG_NOTE_KIND (link) == REG_INC
5450 && (int) REGNO (XEXP (link, 0)) == regno)
5451 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5452 #endif
5455 /* Record the pseudo registers we must reload into hard registers in a
5456 subexpression of a would-be memory address, X referring to a value
5457 in mode MODE. (This function is not called if the address we find
5458 is strictly valid.)
5460 CONTEXT = 1 means we are considering regs as index regs,
5461 = 0 means we are considering them as base regs.
5462 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5463 or an autoinc code.
5464 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5465 is the code of the index part of the address. Otherwise, pass SCRATCH
5466 for this argument.
5467 OPNUM and TYPE specify the purpose of any reloads made.
5469 IND_LEVELS says how many levels of indirect addressing are
5470 supported at this point in the address.
5472 INSN, if nonzero, is the insn in which we do the reload. It is used
5473 to determine if we may generate output reloads.
5475 We return nonzero if X, as a whole, is reloaded or replaced. */
5477 /* Note that we take shortcuts assuming that no multi-reg machine mode
5478 occurs as part of an address.
5479 Also, this is not fully machine-customizable; it works for machines
5480 such as VAXen and 68000's and 32000's, but other possible machines
5481 could have addressing modes that this does not handle right.
5482 If you add push_reload calls here, you need to make sure gen_reload
5483 handles those cases gracefully. */
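/* An added example, not in the original sources: for an address such as
   (plus (mult (reg 65) (const_int 4)) (reg 66)), (reg 65) is examined
   with CONTEXT == 1 (it must be usable as an index register), while
   (reg 66) is examined with CONTEXT == 0 (it must be usable as a base
   register).  */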
5485 static int
5486 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5487 enum rtx_code outer_code, enum rtx_code index_code,
5488 rtx *loc, int opnum, enum reload_type type,
5489 int ind_levels, rtx insn)
5491 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5492 ((CONTEXT) == 0 \
5493 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5494 : REGNO_OK_FOR_INDEX_P (REGNO))
5496 enum reg_class context_reg_class;
5497 RTX_CODE code = GET_CODE (x);
5499 if (context == 1)
5500 context_reg_class = INDEX_REG_CLASS;
5501 else
5502 context_reg_class = base_reg_class (mode, outer_code, index_code);
5504 switch (code)
5506 case PLUS:
5508 rtx orig_op0 = XEXP (x, 0);
5509 rtx orig_op1 = XEXP (x, 1);
5510 RTX_CODE code0 = GET_CODE (orig_op0);
5511 RTX_CODE code1 = GET_CODE (orig_op1);
5512 rtx op0 = orig_op0;
5513 rtx op1 = orig_op1;
5515 if (GET_CODE (op0) == SUBREG)
5517 op0 = SUBREG_REG (op0);
5518 code0 = GET_CODE (op0);
5519 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5520 op0 = gen_rtx_REG (word_mode,
5521 (REGNO (op0) +
5522 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5523 GET_MODE (SUBREG_REG (orig_op0)),
5524 SUBREG_BYTE (orig_op0),
5525 GET_MODE (orig_op0))));
5528 if (GET_CODE (op1) == SUBREG)
5530 op1 = SUBREG_REG (op1);
5531 code1 = GET_CODE (op1);
5532 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5533 /* ??? Why is this given op1's mode and above for
5534 ??? op0 SUBREGs we use word_mode? */
5535 op1 = gen_rtx_REG (GET_MODE (op1),
5536 (REGNO (op1) +
5537 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5538 GET_MODE (SUBREG_REG (orig_op1)),
5539 SUBREG_BYTE (orig_op1),
5540 GET_MODE (orig_op1))));
5542 /* Plus in the index register may be created only as a result of
5543 register rematerialization for expression like &localvar*4. Reload it.
5544 It may be possible to combine the displacement on the outer level,
5545 but it is probably not worthwhile to do so. */
5546 if (context == 1)
5548 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5549 opnum, ADDR_TYPE (type), ind_levels, insn);
5550 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5551 context_reg_class,
5552 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5553 return 1;
5556 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5557 || code0 == ZERO_EXTEND || code1 == MEM)
5559 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5560 &XEXP (x, 0), opnum, type, ind_levels,
5561 insn);
5562 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5563 &XEXP (x, 1), opnum, type, ind_levels,
5564 insn);
5567 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5568 || code1 == ZERO_EXTEND || code0 == MEM)
5570 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5571 &XEXP (x, 0), opnum, type, ind_levels,
5572 insn);
5573 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5578 else if (code0 == CONST_INT || code0 == CONST
5579 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5580 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5581 &XEXP (x, 1), opnum, type, ind_levels,
5582 insn);
5584 else if (code1 == CONST_INT || code1 == CONST
5585 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5586 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5587 &XEXP (x, 0), opnum, type, ind_levels,
5588 insn);
5590 else if (code0 == REG && code1 == REG)
5592 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5593 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5594 return 0;
5595 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5596 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5597 return 0;
5598 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5599 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5600 &XEXP (x, 1), opnum, type, ind_levels,
5601 insn);
5602 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5603 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5604 &XEXP (x, 0), opnum, type, ind_levels,
5605 insn);
5606 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5607 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5608 &XEXP (x, 0), opnum, type, ind_levels,
5609 insn);
5610 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5611 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5612 &XEXP (x, 1), opnum, type, ind_levels,
5613 insn);
5614 else
5616 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5617 &XEXP (x, 0), opnum, type, ind_levels,
5618 insn);
5619 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5620 &XEXP (x, 1), opnum, type, ind_levels,
5621 insn);
5625 else if (code0 == REG)
5627 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5635 else if (code1 == REG)
5637 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5638 &XEXP (x, 1), opnum, type, ind_levels,
5639 insn);
5640 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5641 &XEXP (x, 0), opnum, type, ind_levels,
5642 insn);
5646 return 0;
5648 case POST_MODIFY:
5649 case PRE_MODIFY:
5651 rtx op0 = XEXP (x, 0);
5652 rtx op1 = XEXP (x, 1);
5653 enum rtx_code index_code;
5654 int regno;
5655 int reloadnum;
5657 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5658 return 0;
5660 /* Currently, we only support {PRE,POST}_MODIFY constructs
5661 where a base register is {inc,dec}remented by the contents
5662 of another register or by a constant value. Thus, these
5663 operands must match. */
5664 gcc_assert (op0 == XEXP (op1, 0));
5666 /* Require index register (or constant). Let's just handle the
5667 register case in the meantime... If the target allows
5668 auto-modify by a constant then we could try replacing a pseudo
5669 register with its equivalent constant where applicable.
5671 We also handle the case where the register was eliminated
5672 resulting in a PLUS subexpression.
5674 If we later decide to reload the whole PRE_MODIFY or
5675 POST_MODIFY, inc_for_reload might clobber the reload register
5676 before reading the index. The index register might therefore
5677 need to live longer than a TYPE reload normally would, so be
5678 conservative and class it as RELOAD_OTHER. */
5679 if ((REG_P (XEXP (op1, 1))
5680 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5681 || GET_CODE (XEXP (op1, 1)) == PLUS)
5682 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5683 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5684 ind_levels, insn);
5686 gcc_assert (REG_P (XEXP (op1, 0)));
5688 regno = REGNO (XEXP (op1, 0));
5689 index_code = GET_CODE (XEXP (op1, 1));
5691 /* A register that is incremented cannot be constant! */
5692 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5693 || reg_equiv_constant (regno) == 0);
5695 /* Handle a register that is equivalent to a memory location
5696 which cannot be addressed directly. */
5697 if (reg_equiv_memory_loc (regno) != 0
5698 && (reg_equiv_address (regno) != 0
5699 || num_not_at_initial_offset))
5701 rtx tem = make_memloc (XEXP (x, 0), regno);
5703 if (reg_equiv_address (regno)
5704 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5706 rtx orig = tem;
5708 /* First reload the memory location's address.
5709 We can't use ADDR_TYPE (type) here, because we need to
5710 write back the value after reading it, hence we actually
5711 need two registers. */
5712 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5713 &XEXP (tem, 0), opnum,
5714 RELOAD_OTHER,
5715 ind_levels, insn);
5717 if (!rtx_equal_p (tem, orig))
5718 push_reg_equiv_alt_mem (regno, tem);
5720 /* Then reload the memory location into a base
5721 register. */
5722 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5723 &XEXP (op1, 0),
5724 base_reg_class (mode, code,
5725 index_code),
5726 GET_MODE (x), GET_MODE (x), 0,
5727 0, opnum, RELOAD_OTHER);
5729 update_auto_inc_notes (this_insn, regno, reloadnum);
5730 return 0;
5734 if (reg_renumber[regno] >= 0)
5735 regno = reg_renumber[regno];
5737 /* We require a base register here... */
5738 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5740 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5741 &XEXP (op1, 0), &XEXP (x, 0),
5742 base_reg_class (mode, code, index_code),
5743 GET_MODE (x), GET_MODE (x), 0, 0,
5744 opnum, RELOAD_OTHER);
5746 update_auto_inc_notes (this_insn, regno, reloadnum);
5747 return 0;
5750 return 0;
5752 case POST_INC:
5753 case POST_DEC:
5754 case PRE_INC:
5755 case PRE_DEC:
5756 if (REG_P (XEXP (x, 0)))
5758 int regno = REGNO (XEXP (x, 0));
5759 int value = 0;
5760 rtx x_orig = x;
5762 /* A register that is incremented cannot be constant! */
5763 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5764 || reg_equiv_constant (regno) == 0);
5766 /* Handle a register that is equivalent to a memory location
5767 which cannot be addressed directly. */
5768 if (reg_equiv_memory_loc (regno) != 0
5769 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5771 rtx tem = make_memloc (XEXP (x, 0), regno);
5772 if (reg_equiv_address (regno)
5773 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5775 rtx orig = tem;
5777 /* First reload the memory location's address.
5778 We can't use ADDR_TYPE (type) here, because we need to
5779 write back the value after reading it, hence we actually
5780 need two registers. */
5781 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5782 &XEXP (tem, 0), opnum, type,
5783 ind_levels, insn);
5784 if (!rtx_equal_p (tem, orig))
5785 push_reg_equiv_alt_mem (regno, tem);
5786 /* Put this inside a new increment-expression. */
5787 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5788 /* Proceed to reload that, as if it contained a register. */
5792 /* If we have a hard register that is ok in this incdec context,
5793 don't make a reload. If the register isn't nice enough for
5794 autoincdec, we can reload it. But if an autoincrement of a
5795 register that we have just verified as acceptable is still not
5796 "valid" in the surrounding context, then no autoincrement is "valid".
5797 If that is true and something made an autoincrement anyway,
5798 this must be a special context where one is allowed.
5799 (For example, a "push" instruction.)
5800 We can't improve this address, so leave it alone. */
5802 /* Otherwise, reload the autoincrement into a suitable hard reg
5803 and record how much to increment by. */
5805 if (reg_renumber[regno] >= 0)
5806 regno = reg_renumber[regno];
5807 if (regno >= FIRST_PSEUDO_REGISTER
5808 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5809 index_code))
5811 int reloadnum;
5813 /* If we can output the register afterwards, do so, this
5814 saves the extra update.
5815 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5816 CALL_INSN - and it does not set CC0.
5817 But don't do this if we cannot directly address the
5818 memory location, since this will make it harder to
5819 reuse address reloads, and increases register pressure.
5820 Also don't do this if we can probably update x directly. */
5821 rtx equiv = (MEM_P (XEXP (x, 0))
5822 ? XEXP (x, 0)
5823 : reg_equiv_mem (regno));
5824 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5825 if (insn && NONJUMP_INSN_P (insn) && equiv
5826 && memory_operand (equiv, GET_MODE (equiv))
5827 #ifdef HAVE_cc0
5828 && ! sets_cc0_p (PATTERN (insn))
5829 #endif
5830 && ! (icode != CODE_FOR_nothing
5831 && insn_operand_matches (icode, 0, equiv)
5832 && insn_operand_matches (icode, 1, equiv)))
5834 /* We use the original pseudo for loc, so that
5835 emit_reload_insns() knows which pseudo this
5836 reload refers to and updates the pseudo rtx, not
5837 its equivalent memory location, as well as the
5838 corresponding entry in reg_last_reload_reg. */
5839 loc = &XEXP (x_orig, 0);
5840 x = XEXP (x, 0);
5841 reloadnum
5842 = push_reload (x, x, loc, loc,
5843 context_reg_class,
5844 GET_MODE (x), GET_MODE (x), 0, 0,
5845 opnum, RELOAD_OTHER);
5847 else
5849 reloadnum
5850 = push_reload (x, x, loc, (rtx*) 0,
5851 context_reg_class,
5852 GET_MODE (x), GET_MODE (x), 0, 0,
5853 opnum, type);
5854 rld[reloadnum].inc
5855 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5857 value = 1;
5860 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5861 reloadnum);
5863 return value;
5865 return 0;
5867 case TRUNCATE:
5868 case SIGN_EXTEND:
5869 case ZERO_EXTEND:
5870 /* Look for parts to reload in the inner expression and reload them
5871 too, in addition to this operation. Reloading all inner parts in
5872 addition to this one shouldn't be necessary, but at this point,
5873 we don't know if we can possibly omit any part that *can* be
5874 reloaded. Targets that are better off reloading just either part
5875 (or perhaps even a different part of an outer expression), should
5876 define LEGITIMIZE_RELOAD_ADDRESS. */
5877 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5878 context, code, SCRATCH, &XEXP (x, 0), opnum,
5879 type, ind_levels, insn);
5880 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5881 context_reg_class,
5882 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5883 return 1;
5885 case MEM:
5886 /* This is probably the result of a substitution, by eliminate_regs, of
5887 an equivalent address for a pseudo that was not allocated to a hard
5888 register. Verify that the specified address is valid and reload it
5889 into a register.
5891 Since we know we are going to reload this item, don't decrement for
5892 the indirection level.
5894 Note that this is actually conservative: it would be slightly more
5895 efficient to use the value of SPILL_INDIRECT_LEVELS from
5896 reload1.c here. */
5898 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5899 opnum, ADDR_TYPE (type), ind_levels, insn);
5900 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5901 context_reg_class,
5902 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5903 return 1;
5905 case REG:
5907 int regno = REGNO (x);
5909 if (reg_equiv_constant (regno) != 0)
5911 find_reloads_address_part (reg_equiv_constant (regno), loc,
5912 context_reg_class,
5913 GET_MODE (x), opnum, type, ind_levels);
5914 return 1;
5917 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5918 that feeds this insn. */
5919 if (reg_equiv_mem (regno) != 0)
5921 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5922 context_reg_class,
5923 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5924 return 1;
5926 #endif
5928 if (reg_equiv_memory_loc (regno)
5929 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5931 rtx tem = make_memloc (x, regno);
5932 if (reg_equiv_address (regno) != 0
5933 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5935 x = tem;
5936 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5937 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5938 ind_levels, insn);
5939 if (!rtx_equal_p (x, tem))
5940 push_reg_equiv_alt_mem (regno, x);
5944 if (reg_renumber[regno] >= 0)
5945 regno = reg_renumber[regno];
5947 if (regno >= FIRST_PSEUDO_REGISTER
5948 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5949 index_code))
5951 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5952 context_reg_class,
5953 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5954 return 1;
5957 /* If a register appearing in an address is the subject of a CLOBBER
5958 in this insn, reload it into some other register to be safe.
5959 The CLOBBER is supposed to make the register unavailable
5960 from before this insn to after it. */
5961 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5963 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5964 context_reg_class,
5965 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5966 return 1;
5969 return 0;
5971 case SUBREG:
5972 if (REG_P (SUBREG_REG (x)))
5974 /* If this is a SUBREG of a hard register and the resulting register
5975 is of the wrong class, reload the whole SUBREG. This avoids
5976 needless copies if SUBREG_REG is multi-word. */
5977 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5979 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5981 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5982 index_code))
5984 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5985 context_reg_class,
5986 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5987 return 1;
5990 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5991 is larger than the class size, then reload the whole SUBREG. */
5992 else
5994 enum reg_class rclass = context_reg_class;
5995 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
5996 > reg_class_size[(int) rclass])
5998 x = find_reloads_subreg_address (x, 0, opnum,
5999 ADDR_TYPE (type),
6000 ind_levels, insn, NULL);
6001 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6002 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6003 return 1;
6007 break;
6009 default:
6010 break;
6014 const char *fmt = GET_RTX_FORMAT (code);
6015 int i;
6017 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6019 if (fmt[i] == 'e')
6020 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6021 we get here. */
6022 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6023 &XEXP (x, i), opnum, type, ind_levels, insn);
6027 #undef REG_OK_FOR_CONTEXT
6028 return 0;
6031 /* X, which is found at *LOC, is a part of an address that needs to be
6032 reloaded into a register of class RCLASS. If X is a constant, or if
6033 X is a PLUS that contains a constant, check that the constant is a
6034 legitimate operand and that we are supposed to be able to load
6035 it into the register.
6037 If not, force the constant into memory and reload the MEM instead.
6039 MODE is the mode to use, in case X is an integer constant.
6041 OPNUM and TYPE describe the purpose of any reloads made.
6043 IND_LEVELS says how many levels of indirect addressing this machine
6044 supports. */
6046 static void
6047 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6048 enum machine_mode mode, int opnum,
6049 enum reload_type type, int ind_levels)
6051 if (CONSTANT_P (x)
6052 && (!targetm.legitimate_constant_p (mode, x)
6053 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6055 x = force_const_mem (mode, x);
6056 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6057 opnum, type, ind_levels, 0);
6060 else if (GET_CODE (x) == PLUS
6061 && CONSTANT_P (XEXP (x, 1))
6062 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6063 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6064 == NO_REGS))
6066 rtx tem;
6068 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6069 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6070 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6071 opnum, type, ind_levels, 0);
6074 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6075 mode, VOIDmode, 0, 0, opnum, type);
6078 /* X, a subreg of a pseudo, is a part of an address that needs to be
6079 reloaded.
6081 If the pseudo is equivalent to a memory location that cannot be directly
6082 addressed, make the necessary address reloads.
6084 If address reloads have been necessary, or if the address is changed
6085 by register elimination, return the rtx of the memory location;
6086 otherwise, return X.
6088 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6089 memory location.
6091 OPNUM and TYPE identify the purpose of the reload.
6093 IND_LEVELS says how many levels of indirect addressing are
6094 supported at this point in the address.
6096 INSN, if nonzero, is the insn in which we do the reload. It is used
6097 to determine where to put USEs for pseudos that we have to replace with
6098 stack slots. */
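/* An added example, not in the original sources: if (reg:SI 70) did not
   get a hard register and lives at (mem:SI (plus (reg fp) (const_int -8))),
   then on a little-endian target (subreg:HI (reg:SI 70) 2) is rewritten
   here, when the slot address must be replaced, as
   (mem:HI (plus (reg fp) (const_int -6))), and any address reloads needed
   for that new address are generated.  */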
6100 static rtx
6101 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6102 enum reload_type type, int ind_levels, rtx insn,
6103 int *address_reloaded)
6105 int regno = REGNO (SUBREG_REG (x));
6106 int reloaded = 0;
6108 if (reg_equiv_memory_loc (regno))
6110 /* If the address is not directly addressable, or if the address is not
6111 offsettable, then it must be replaced. */
6112 if (! force_replace
6113 && (reg_equiv_address (regno)
6114 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6115 force_replace = 1;
6117 if (force_replace || num_not_at_initial_offset)
6119 rtx tem = make_memloc (SUBREG_REG (x), regno);
6121 /* If the address changes because of register elimination, then
6122 it must be replaced. */
6123 if (force_replace
6124 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6126 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6127 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6128 int offset;
6129 rtx orig = tem;
6131 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6132 hold the correct (negative) byte offset. */
6133 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6134 offset = inner_size - outer_size;
6135 else
6136 offset = SUBREG_BYTE (x);
6138 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6139 PUT_MODE (tem, GET_MODE (x));
6140 if (MEM_OFFSET_KNOWN_P (tem))
6141 set_mem_offset (tem, MEM_OFFSET (tem) + offset);
6142 if (MEM_SIZE_KNOWN_P (tem)
6143 && MEM_SIZE (tem) != (HOST_WIDE_INT) outer_size)
6144 set_mem_size (tem, outer_size);
6146 /* If this was a paradoxical subreg that we replaced, the
6147 resulting memory must be sufficiently aligned to allow
6148 us to widen the mode of the memory. */
6149 if (outer_size > inner_size)
6151 rtx base;
6153 base = XEXP (tem, 0);
6154 if (GET_CODE (base) == PLUS)
6156 if (CONST_INT_P (XEXP (base, 1))
6157 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6158 return x;
6159 base = XEXP (base, 0);
6161 if (!REG_P (base)
6162 || (REGNO_POINTER_ALIGN (REGNO (base))
6163 < outer_size * BITS_PER_UNIT))
6164 return x;
6167 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6168 XEXP (tem, 0), &XEXP (tem, 0),
6169 opnum, type, ind_levels, insn);
6170 /* ??? Do we need to handle nonzero offsets somehow? */
6171 if (!offset && !rtx_equal_p (tem, orig))
6172 push_reg_equiv_alt_mem (regno, tem);
6174 /* For some processors an address may be valid in the
6175 original mode but not in a smaller mode. For
6176 example, ARM accepts a scaled index register in
6177 SImode but not in HImode. Note that this is only
6178 a problem if the address in reg_equiv_mem is already
6179 invalid in the new mode; other cases would be fixed
6180 by find_reloads_address as usual.
6182 ??? We attempt to handle such cases here by doing an
6183 additional reload of the full address after the
6184 usual processing by find_reloads_address. Note that
6185 this may not work in the general case, but it seems
6186 to cover the cases where this situation currently
6187 occurs. A more general fix might be to reload the
6188 *value* instead of the address, but this would not
6189 be expected by the callers of this routine as-is.
6191 If find_reloads_address already completely replaced
6192 the address, there is nothing further to do. */
6193 if (reloaded == 0
6194 && reg_equiv_mem (regno) != 0
6195 && !strict_memory_address_addr_space_p
6196 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6197 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6199 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6200 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6201 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6202 opnum, type);
6203 reloaded = 1;
6205 /* If this is not a toplevel operand, find_reloads doesn't see
6206 this substitution. We have to emit a USE of the pseudo so
6207 that delete_output_reload can see it. */
6208 if (replace_reloads && recog_data.operand[opnum] != x)
6209 /* We mark the USE with QImode so that we recognize it
6210 as one that can be safely deleted at the end of
6211 reload. */
6212 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6213 SUBREG_REG (x)),
6214 insn), QImode);
6215 x = tem;
6219 if (reloaded && address_reloaded)
6220 *address_reloaded = 1;
6222 return x;
6225 /* Substitute into the current INSN the registers into which we have reloaded
6226 the things that need reloading. The array `replacements'
6227 contains the locations of all pointers that must be changed
6228 and says what to replace them with.
6230 The substitutions are applied in place; nothing is returned. */
6232 void
6233 subst_reloads (rtx insn)
6235 int i;
6237 for (i = 0; i < n_replacements; i++)
6239 struct replacement *r = &replacements[i];
6240 rtx reloadreg = rld[r->what].reg_rtx;
6241 if (reloadreg)
6243 #ifdef DEBUG_RELOAD
6244 /* This checking takes a very long time on some platforms
6245 causing the gcc.c-torture/compile/limits-fnargs.c test
6246 to time out during testing. See PR 31850.
6248 Internal consistency test. Check that we don't modify
6249 anything in the equivalence arrays. Whenever something from
6250 those arrays needs to be reloaded, it must be unshared before
6251 being substituted into; the equivalence must not be modified.
6252 Otherwise, if the equivalence is used after that, it will
6253 have been modified, and the thing substituted (probably a
6254 register) is likely overwritten and not a usable equivalence. */
6255 int check_regno;
6257 for (check_regno = 0; check_regno < max_regno; check_regno++)
6259 #define CHECK_MODF(ARRAY) \
6260 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6261 || !loc_mentioned_in_p (r->where, \
6262 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6264 CHECK_MODF (equiv_constant);
6265 CHECK_MODF (equiv_memory_loc);
6266 CHECK_MODF (equiv_address);
6267 CHECK_MODF (equiv_mem);
6268 #undef CHECK_MODF
6270 #endif /* DEBUG_RELOAD */
6272 /* If we're replacing a LABEL_REF with a register, there must
6273 already be an indication (to e.g. flow) which label this
6274 register refers to. */
6275 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6276 || !JUMP_P (insn)
6277 || find_reg_note (insn,
6278 REG_LABEL_OPERAND,
6279 XEXP (*r->where, 0))
6280 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6282 /* Encapsulate RELOADREG so its machine mode matches what
6283 used to be there. Note that gen_lowpart_common will
6284 do the wrong thing if RELOADREG is multi-word. RELOADREG
6285 will always be a REG here. */
6286 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6287 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6289 *r->where = reloadreg;
6291 /* If reload got no reg and isn't optional, something's wrong. */
6292 else
6293 gcc_assert (rld[r->what].optional);
6297 /* Make a copy of any replacements being done into X and move those
6298 copies to locations in Y, a copy of X. */
6300 void
6301 copy_replacements (rtx x, rtx y)
6303 copy_replacements_1 (&x, &y, n_replacements);
6306 static void
6307 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6309 int i, j;
6310 rtx x, y;
6311 struct replacement *r;
6312 enum rtx_code code;
6313 const char *fmt;
6315 for (j = 0; j < orig_replacements; j++)
6316 if (replacements[j].where == px)
6318 r = &replacements[n_replacements++];
6319 r->where = py;
6320 r->what = replacements[j].what;
6321 r->mode = replacements[j].mode;
6324 x = *px;
6325 y = *py;
6326 code = GET_CODE (x);
6327 fmt = GET_RTX_FORMAT (code);
6329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6331 if (fmt[i] == 'e')
6332 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6333 else if (fmt[i] == 'E')
6334 for (j = XVECLEN (x, i); --j >= 0; )
6335 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6336 orig_replacements);
6340 /* Change any replacements being done to *X to be done to *Y. */
6342 void
6343 move_replacements (rtx *x, rtx *y)
6345 int i;
6347 for (i = 0; i < n_replacements; i++)
6348 if (replacements[i].where == x)
6349 replacements[i].where = y;
6352 /* If LOC was scheduled to be replaced by something, return the replacement.
6353 Otherwise, return *LOC. */
6355 rtx
6356 find_replacement (rtx *loc)
6358 struct replacement *r;
6360 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6362 rtx reloadreg = rld[r->what].reg_rtx;
6364 if (reloadreg && r->where == loc)
6366 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6367 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6369 return reloadreg;
6371 else if (reloadreg && GET_CODE (*loc) == SUBREG
6372 && r->where == &SUBREG_REG (*loc))
6374 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6375 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6377 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6378 GET_MODE (SUBREG_REG (*loc)),
6379 SUBREG_BYTE (*loc));
6383 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6384 what's inside and make a new rtl if so. */
6385 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6386 || GET_CODE (*loc) == MULT)
6388 rtx x = find_replacement (&XEXP (*loc, 0));
6389 rtx y = find_replacement (&XEXP (*loc, 1));
6391 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6392 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6395 return *loc;
6398 /* Return nonzero if register in range [REGNO, ENDREGNO)
6399 appears either explicitly or implicitly in X
6400 other than being stored into (except for earlyclobber operands).
6402 References contained within the substructure at LOC do not count.
6403 LOC may be zero, meaning don't ignore anything.
6405 This is similar to refers_to_regno_p in rtlanal.c except that we
6406 look at equivalences for pseudos that didn't get hard registers. */
6408 static int
6409 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6410 rtx x, rtx *loc)
6412 int i;
6413 unsigned int r;
6414 RTX_CODE code;
6415 const char *fmt;
6417 if (x == 0)
6418 return 0;
6420 repeat:
6421 code = GET_CODE (x);
6423 switch (code)
6425 case REG:
6426 r = REGNO (x);
6428 /* If this is a pseudo, a hard register must not have been allocated.
6429 X must therefore either be a constant or be in memory. */
6430 if (r >= FIRST_PSEUDO_REGISTER)
6432 if (reg_equiv_memory_loc (r))
6433 return refers_to_regno_for_reload_p (regno, endregno,
6434 reg_equiv_memory_loc (r),
6435 (rtx*) 0);
6437 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6438 return 0;
6441 return (endregno > r
6442 && regno < r + (r < FIRST_PSEUDO_REGISTER
6443 ? hard_regno_nregs[r][GET_MODE (x)]
6444 : 1));
6446 case SUBREG:
6447 /* If this is a SUBREG of a hard reg, we can see exactly which
6448 registers are being modified. Otherwise, handle normally. */
6449 if (REG_P (SUBREG_REG (x))
6450 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6452 unsigned int inner_regno = subreg_regno (x);
6453 unsigned int inner_endregno
6454 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6455 ? subreg_nregs (x) : 1);
6457 return endregno > inner_regno && regno < inner_endregno;
6459 break;
6461 case CLOBBER:
6462 case SET:
6463 if (&SET_DEST (x) != loc
6464 /* Note setting a SUBREG counts as referring to the REG it is in for
6465 a pseudo but not for hard registers since we can
6466 treat each word individually. */
6467 && ((GET_CODE (SET_DEST (x)) == SUBREG
6468 && loc != &SUBREG_REG (SET_DEST (x))
6469 && REG_P (SUBREG_REG (SET_DEST (x)))
6470 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6471 && refers_to_regno_for_reload_p (regno, endregno,
6472 SUBREG_REG (SET_DEST (x)),
6473 loc))
6474 /* If the output is an earlyclobber operand, this is
6475 a conflict. */
6476 || ((!REG_P (SET_DEST (x))
6477 || earlyclobber_operand_p (SET_DEST (x)))
6478 && refers_to_regno_for_reload_p (regno, endregno,
6479 SET_DEST (x), loc))))
6480 return 1;
6482 if (code == CLOBBER || loc == &SET_SRC (x))
6483 return 0;
6484 x = SET_SRC (x);
6485 goto repeat;
6487 default:
6488 break;
6491 /* X does not match, so try its subexpressions. */
6493 fmt = GET_RTX_FORMAT (code);
6494 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6496 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6498 if (i == 0)
6500 x = XEXP (x, 0);
6501 goto repeat;
6503 else
6504 if (refers_to_regno_for_reload_p (regno, endregno,
6505 XEXP (x, i), loc))
6506 return 1;
6508 else if (fmt[i] == 'E')
6510 int j;
6511 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6512 if (loc != &XVECEXP (x, i, j)
6513 && refers_to_regno_for_reload_p (regno, endregno,
6514 XVECEXP (x, i, j), loc))
6515 return 1;
6518 return 0;
6521 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6522 we check if any register number in X conflicts with the relevant register
6523 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6524 contains a MEM (we don't bother checking for memory addresses that can't
6525 conflict because we expect this to be a rare case).
6527 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6528 that we look at equivalences for pseudos that didn't get hard registers. */
6530 int
6531 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6533 int regno, endregno;
6535 /* Overly conservative. */
6536 if (GET_CODE (x) == STRICT_LOW_PART
6537 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6538 x = XEXP (x, 0);
6540 /* If either argument is a constant, then modifying X can not affect IN. */
6541 if (CONSTANT_P (x) || CONSTANT_P (in))
6542 return 0;
6543 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6544 return refers_to_mem_for_reload_p (in);
6545 else if (GET_CODE (x) == SUBREG)
6547 regno = REGNO (SUBREG_REG (x));
6548 if (regno < FIRST_PSEUDO_REGISTER)
6549 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6550 GET_MODE (SUBREG_REG (x)),
6551 SUBREG_BYTE (x),
6552 GET_MODE (x));
6553 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6554 ? subreg_nregs (x) : 1);
6556 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6558 else if (REG_P (x))
6560 regno = REGNO (x);
6562 /* If this is a pseudo, it must not have been assigned a hard register.
6563 Therefore, it must either be in memory or be a constant. */
6565 if (regno >= FIRST_PSEUDO_REGISTER)
6567 if (reg_equiv_memory_loc (regno))
6568 return refers_to_mem_for_reload_p (in);
6569 gcc_assert (reg_equiv_constant (regno));
6570 return 0;
6573 endregno = END_HARD_REGNO (x);
6575 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6577 else if (MEM_P (x))
6578 return refers_to_mem_for_reload_p (in);
6579 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6580 || GET_CODE (x) == CC0)
6581 return reg_mentioned_p (x, in);
6582 else
6584 gcc_assert (GET_CODE (x) == PLUS);
6586 /* We actually want to know if X is mentioned somewhere inside IN.
6587 We must not say that (plus (sp) (const_int 124)) is in
6588 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6589 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6590 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6591 while (MEM_P (in))
6592 in = XEXP (in, 0);
6593 if (REG_P (in))
6594 return 0;
6595 else if (GET_CODE (in) == PLUS)
6596 return (rtx_equal_p (x, in)
6597 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6598 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6599 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6600 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6603 gcc_unreachable ();
6606 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6607 registers. */
6609 static int
6610 refers_to_mem_for_reload_p (rtx x)
6612 const char *fmt;
6613 int i;
6615 if (MEM_P (x))
6616 return 1;
6618 if (REG_P (x))
6619 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6620 && reg_equiv_memory_loc (REGNO (x)));
6622 fmt = GET_RTX_FORMAT (GET_CODE (x));
6623 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6624 if (fmt[i] == 'e'
6625 && (MEM_P (XEXP (x, i))
6626 || refers_to_mem_for_reload_p (XEXP (x, i))))
6627 return 1;
6629 return 0;
6632 /* Check the insns before INSN to see if there is a suitable register
6633 containing the same value as GOAL.
6634 If OTHER is -1, look for a register in class RCLASS.
6635 Otherwise, just see if register number OTHER shares GOAL's value.
6637 Return an rtx for the register found, or zero if none is found.
6639 If RELOAD_REG_P is (short *)1,
6640 we reject any hard reg that appears in reload_reg_rtx
6641 because such a hard reg is also needed coming into this insn.
6643 If RELOAD_REG_P is any other nonzero value,
6644 it is a vector indexed by hard reg number
6645 and we reject any hard reg whose element in the vector is nonnegative
6646 as well as any that appears in reload_reg_rtx.
6648 If GOAL is zero, then GOALREG is a register number; we look
6649 for an equivalent for that register.
6651 MODE is the machine mode of the value we want an equivalence for.
6652 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6654 This function is used by jump.c as well as in the reload pass.
6656 If GOAL is the sum of the stack pointer and a constant, we treat it
6657 as if it were a constant except that sp is required to be unchanging. */
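/* An added example, not in the original sources: if an earlier insn did
   (set (reg 3) (mem:SI (reg 65))) and neither (reg 3) nor the memory
   location has been changed since, then asking for an equivalent of
   (mem:SI (reg 65)) can return (reg 3), avoiding a fresh load.  */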
6659 rtx
6660 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6661 short *reload_reg_p, int goalreg, enum machine_mode mode)
6663 rtx p = insn;
6664 rtx goaltry, valtry, value, where;
6665 rtx pat;
6666 int regno = -1;
6667 int valueno;
6668 int goal_mem = 0;
6669 int goal_const = 0;
6670 int goal_mem_addr_varies = 0;
6671 int need_stable_sp = 0;
6672 int nregs;
6673 int valuenregs;
6674 int num = 0;
6676 if (goal == 0)
6677 regno = goalreg;
6678 else if (REG_P (goal))
6679 regno = REGNO (goal);
6680 else if (MEM_P (goal))
6682 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6683 if (MEM_VOLATILE_P (goal))
6684 return 0;
6685 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6686 return 0;
6687 /* An address with side effects must be reexecuted. */
6688 switch (code)
6690 case POST_INC:
6691 case PRE_INC:
6692 case POST_DEC:
6693 case PRE_DEC:
6694 case POST_MODIFY:
6695 case PRE_MODIFY:
6696 return 0;
6697 default:
6698 break;
6700 goal_mem = 1;
6702 else if (CONSTANT_P (goal))
6703 goal_const = 1;
6704 else if (GET_CODE (goal) == PLUS
6705 && XEXP (goal, 0) == stack_pointer_rtx
6706 && CONSTANT_P (XEXP (goal, 1)))
6707 goal_const = need_stable_sp = 1;
6708 else if (GET_CODE (goal) == PLUS
6709 && XEXP (goal, 0) == frame_pointer_rtx
6710 && CONSTANT_P (XEXP (goal, 1)))
6711 goal_const = 1;
6712 else
6713 return 0;
6715 num = 0;
6716 /* Scan insns back from INSN, looking for one that copies
6717 a value into or out of GOAL.
6718 Stop and give up if we reach a label. */
6720 while (1)
6722 p = PREV_INSN (p);
6723 if (p && DEBUG_INSN_P (p))
6724 continue;
6725 num++;
6726 if (p == 0 || LABEL_P (p)
6727 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6728 return 0;
6730 /* Don't reuse register contents from before a setjmp-type
6731 function call; on the second return (from the longjmp) it
6732 might have been clobbered by a later reuse. It doesn't
6733 seem worthwhile to check whether it really is reused, even if
6734 that information were readily available; just don't reuse it
6735 across the setjmp call. */
6736 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6737 return 0;
6739 if (NONJUMP_INSN_P (p)
6740 /* If we don't want spill regs ... */
6741 && (! (reload_reg_p != 0
6742 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6743 /* ... then ignore insns introduced by reload; they aren't
6744 useful and can cause results in reload_as_needed to be
6745 different from what they were when calculating the need for
6746 spills. If we notice an input-reload insn here, we will
6747 reject it below, but it might hide a usable equivalent.
6748 That makes bad code. It may even fail: perhaps no reg was
6749 spilled for this insn because it was assumed we would find
6750 that equivalent. */
6751 || INSN_UID (p) < reload_first_uid))
6753 rtx tem;
6754 pat = single_set (p);
6756 /* First check for something that sets some reg equal to GOAL. */
6757 if (pat != 0
6758 && ((regno >= 0
6759 && true_regnum (SET_SRC (pat)) == regno
6760 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6761 ||
6762 (regno >= 0
6763 && true_regnum (SET_DEST (pat)) == regno
6764 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6765 ||
6766 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6767 /* When looking for stack pointer + const,
6768 make sure we don't use a stack adjust. */
6769 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6770 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6771 || (goal_mem
6772 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6773 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6774 || (goal_mem
6775 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6776 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6777 /* If we are looking for a constant,
6778 and something equivalent to that constant was copied
6779 into a reg, we can use that reg. */
6780 || (goal_const && REG_NOTES (p) != 0
6781 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6782 && ((rtx_equal_p (XEXP (tem, 0), goal)
6783 && (valueno
6784 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6785 || (REG_P (SET_DEST (pat))
6786 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6787 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6788 && CONST_INT_P (goal)
6789 && 0 != (goaltry
6790 = operand_subword (XEXP (tem, 0), 0, 0,
6791 VOIDmode))
6792 && rtx_equal_p (goal, goaltry)
6793 && (valtry
6794 = operand_subword (SET_DEST (pat), 0, 0,
6795 VOIDmode))
6796 && (valueno = true_regnum (valtry)) >= 0)))
6797 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6798 NULL_RTX))
6799 && REG_P (SET_DEST (pat))
6800 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6801 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6802 && CONST_INT_P (goal)
6803 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6804 VOIDmode))
6805 && rtx_equal_p (goal, goaltry)
6806 && (valtry
6807 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6808 && (valueno = true_regnum (valtry)) >= 0)))
6810 if (other >= 0)
6812 if (valueno != other)
6813 continue;
6815 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6816 continue;
6817 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6818 mode, valueno))
6819 continue;
6820 value = valtry;
6821 where = p;
6822 break;
6827 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6828 (or copying VALUE into GOAL, if GOAL is also a register).
6829 Now verify that VALUE is really valid. */
6831 /* VALUENO is the register number of VALUE; a hard register. */
6833 /* Don't try to re-use something that is killed in this insn. We want
6834 to be able to trust REG_UNUSED notes. */
6835 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6836 return 0;
6838 /* If we propose to get the value from the stack pointer or if GOAL is
6839 a MEM based on the stack pointer, we need a stable SP. */
6840 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6841 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6842 goal)))
6843 need_stable_sp = 1;
6845 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6846 if (GET_MODE (value) != mode)
6847 return 0;
6849 /* Reject VALUE if it was loaded from GOAL
6850 and is also a register that appears in the address of GOAL. */
6852 if (goal_mem && value == SET_DEST (single_set (where))
6853 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6854 goal, (rtx*) 0))
6855 return 0;
6857 /* Reject registers that overlap GOAL. */
6859 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6860 nregs = hard_regno_nregs[regno][mode];
6861 else
6862 nregs = 1;
6863 valuenregs = hard_regno_nregs[valueno][mode];
6865 if (!goal_mem && !goal_const
6866 && regno + nregs > valueno && regno < valueno + valuenregs)
6867 return 0;
6869 /* Reject VALUE if it is one of the regs reserved for reloads.
6870 Reload1 knows how to reuse them anyway, and it would get
6871 confused if we allocated one without its knowledge.
6872 (Now that insns introduced by reload are ignored above,
6873 this case shouldn't happen, but I'm not positive.) */
6875 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6877 int i;
6878 for (i = 0; i < valuenregs; ++i)
6879 if (reload_reg_p[valueno + i] >= 0)
6880 return 0;
6883 /* Reject VALUE if it is a register being used for an input reload
6884 even if it is not one of those reserved. */
6886 if (reload_reg_p != 0)
6888 int i;
6889 for (i = 0; i < n_reloads; i++)
6890 if (rld[i].reg_rtx != 0 && rld[i].in)
6892 int regno1 = REGNO (rld[i].reg_rtx);
6893 int nregs1 = hard_regno_nregs[regno1]
6894 [GET_MODE (rld[i].reg_rtx)];
6895 if (regno1 < valueno + valuenregs
6896 && regno1 + nregs1 > valueno)
6897 return 0;
6901 if (goal_mem)
6902 /* We must treat the frame pointer as varying here,
6903 since it can vary, as in a nonlocal goto generated by expand_goto. */
6904 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6906 /* Now verify that the values of GOAL and VALUE remain unaltered
6907 until INSN is reached. */
6909 p = insn;
6910 while (1)
6912 p = PREV_INSN (p);
6913 if (p == where)
6914 return value;
6916 /* Don't trust the conversion past a function call
6917 if either of the two is in a call-clobbered register, or memory. */
6918 if (CALL_P (p))
6920 int i;
6922 if (goal_mem || need_stable_sp)
6923 return 0;
6925 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6926 for (i = 0; i < nregs; ++i)
6927 if (call_used_regs[regno + i]
6928 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6929 return 0;
6931 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6932 for (i = 0; i < valuenregs; ++i)
6933 if (call_used_regs[valueno + i]
6934 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6935 return 0;
6938 if (INSN_P (p))
6940 pat = PATTERN (p);
6942 /* Watch out for unspec_volatile, and volatile asms. */
6943 if (volatile_insn_p (pat))
6944 return 0;
6946 /* If this insn P stores in either GOAL or VALUE, return 0.
6947 If GOAL is a memory ref and this insn writes memory, return 0.
6948 If GOAL is a memory ref and its address is not constant,
6949 and this insn P changes a register used in GOAL, return 0. */
6951 if (GET_CODE (pat) == COND_EXEC)
6952 pat = COND_EXEC_CODE (pat);
6953 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6955 rtx dest = SET_DEST (pat);
6956 while (GET_CODE (dest) == SUBREG
6957 || GET_CODE (dest) == ZERO_EXTRACT
6958 || GET_CODE (dest) == STRICT_LOW_PART)
6959 dest = XEXP (dest, 0);
6960 if (REG_P (dest))
6962 int xregno = REGNO (dest);
6963 int xnregs;
6964 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6965 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6966 else
6967 xnregs = 1;
6968 if (xregno < regno + nregs && xregno + xnregs > regno)
6969 return 0;
6970 if (xregno < valueno + valuenregs
6971 && xregno + xnregs > valueno)
6972 return 0;
6973 if (goal_mem_addr_varies
6974 && reg_overlap_mentioned_for_reload_p (dest, goal))
6975 return 0;
6976 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6977 return 0;
6979 else if (goal_mem && MEM_P (dest)
6980 && ! push_operand (dest, GET_MODE (dest)))
6981 return 0;
6982 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6983 && reg_equiv_memory_loc (regno) != 0)
6984 return 0;
6985 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6986 return 0;
6988 else if (GET_CODE (pat) == PARALLEL)
6990 int i;
6991 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6993 rtx v1 = XVECEXP (pat, 0, i);
6994 if (GET_CODE (v1) == COND_EXEC)
6995 v1 = COND_EXEC_CODE (v1);
6996 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6998 rtx dest = SET_DEST (v1);
6999 while (GET_CODE (dest) == SUBREG
7000 || GET_CODE (dest) == ZERO_EXTRACT
7001 || GET_CODE (dest) == STRICT_LOW_PART)
7002 dest = XEXP (dest, 0);
7003 if (REG_P (dest))
7005 int xregno = REGNO (dest);
7006 int xnregs;
7007 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7008 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7009 else
7010 xnregs = 1;
7011 if (xregno < regno + nregs
7012 && xregno + xnregs > regno)
7013 return 0;
7014 if (xregno < valueno + valuenregs
7015 && xregno + xnregs > valueno)
7016 return 0;
7017 if (goal_mem_addr_varies
7018 && reg_overlap_mentioned_for_reload_p (dest,
7019 goal))
7020 return 0;
7021 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7022 return 0;
7024 else if (goal_mem && MEM_P (dest)
7025 && ! push_operand (dest, GET_MODE (dest)))
7026 return 0;
7027 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7028 && reg_equiv_memory_loc (regno) != 0)
7029 return 0;
7030 else if (need_stable_sp
7031 && push_operand (dest, GET_MODE (dest)))
7032 return 0;
7037 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7039 rtx link;
7041 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7042 link = XEXP (link, 1))
7044 pat = XEXP (link, 0);
7045 if (GET_CODE (pat) == CLOBBER)
7047 rtx dest = SET_DEST (pat);
7049 if (REG_P (dest))
7051 int xregno = REGNO (dest);
7052 int xnregs
7053 = hard_regno_nregs[xregno][GET_MODE (dest)];
7055 if (xregno < regno + nregs
7056 && xregno + xnregs > regno)
7057 return 0;
7058 else if (xregno < valueno + valuenregs
7059 && xregno + xnregs > valueno)
7060 return 0;
7061 else if (goal_mem_addr_varies
7062 && reg_overlap_mentioned_for_reload_p (dest,
7063 goal))
7064 return 0;
7067 else if (goal_mem && MEM_P (dest)
7068 && ! push_operand (dest, GET_MODE (dest)))
7069 return 0;
7070 else if (need_stable_sp
7071 && push_operand (dest, GET_MODE (dest)))
7072 return 0;
7077 #ifdef AUTO_INC_DEC
7078 /* If this insn auto-increments or auto-decrements
7079 either regno or valueno, return 0 now.
7080 If GOAL is a memory ref and its address is not constant,
7081 and this insn P increments a register used in GOAL, return 0. */
7083 rtx link;
7085 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7086 if (REG_NOTE_KIND (link) == REG_INC
7087 && REG_P (XEXP (link, 0)))
7089 int incno = REGNO (XEXP (link, 0));
7090 if (incno < regno + nregs && incno >= regno)
7091 return 0;
7092 if (incno < valueno + valuenregs && incno >= valueno)
7093 return 0;
7094 if (goal_mem_addr_varies
7095 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7096 goal))
7097 return 0;
7100 #endif
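/* A minimal calling sketch for find_equiv_reg as defined above.  The
   wrapper is hypothetical; it only illustrates the interface: OTHER of
   -1 means "search within RCLASS", GOALREG is only used when GOAL is
   zero, and a null RELOAD_REG_P imposes no spill-register
   restrictions.  */
static rtx
example_find_equiv (rtx goal, rtx insn, enum machine_mode mode)
{
  /* Look backward from INSN for a GENERAL_REGS hard register already
     holding the value of GOAL in mode MODE.  */
  rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
			      (short *) 0, 0, mode);

  /* Zero means no trustworthy copy was found before hitting a label,
     a setjmp-type call, or an insn that changes GOAL or the candidate
     register.  */
  return equiv;
}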
7105 /* Find a place where INCED appears in an increment or decrement operator
7106 within X, and return the amount INCED is incremented or decremented by.
7107 The value is always positive. */
7109 static int
7110 find_inc_amount (rtx x, rtx inced)
7112 enum rtx_code code = GET_CODE (x);
7113 const char *fmt;
7114 int i;
7116 if (code == MEM)
7118 rtx addr = XEXP (x, 0);
7119 if ((GET_CODE (addr) == PRE_DEC
7120 || GET_CODE (addr) == POST_DEC
7121 || GET_CODE (addr) == PRE_INC
7122 || GET_CODE (addr) == POST_INC)
7123 && XEXP (addr, 0) == inced)
7124 return GET_MODE_SIZE (GET_MODE (x));
7125 else if ((GET_CODE (addr) == PRE_MODIFY
7126 || GET_CODE (addr) == POST_MODIFY)
7127 && GET_CODE (XEXP (addr, 1)) == PLUS
7128 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7129 && XEXP (addr, 0) == inced
7130 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7132 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7133 return i < 0 ? -i : i;
7137 fmt = GET_RTX_FORMAT (code);
7138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7140 if (fmt[i] == 'e')
7142 int tem = find_inc_amount (XEXP (x, i), inced);
7143 if (tem != 0)
7144 return tem;
7146 if (fmt[i] == 'E')
7148 int j;
7149 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7151 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7152 if (tem != 0)
7153 return tem;
7158 return 0;
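/* A minimal sketch of find_inc_amount on a constructed auto-increment
   address; the hard register number 1 is purely illustrative.  */
static void
example_find_inc_amount (void)
{
  rtx reg = gen_rtx_REG (Pmode, 1);
  rtx mem = gen_rtx_MEM (SImode, gen_rtx_POST_INC (Pmode, reg));

  /* The MEM is SImode, so the implicit increment is its size in bytes.  */
  gcc_assert (find_inc_amount (mem, reg) == (int) GET_MODE_SIZE (SImode));
  /* A register that is not auto-incremented anywhere in X yields 0.  */
  gcc_assert (find_inc_amount (mem, stack_pointer_rtx) == 0);
}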
7161 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7162 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7164 #ifdef AUTO_INC_DEC
7165 static int
7166 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7167 rtx insn)
7169 rtx link;
7171 gcc_assert (insn);
7173 if (! INSN_P (insn))
7174 return 0;
7176 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7177 if (REG_NOTE_KIND (link) == REG_INC)
7179 unsigned int test = (int) REGNO (XEXP (link, 0));
7180 if (test >= regno && test < endregno)
7181 return 1;
7183 return 0;
7185 #else
7187 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7189 #endif
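#ifdef AUTO_INC_DEC
/* A minimal sketch, with a hypothetical wrapper name, of querying the
   REG_INC notes of an insn for a whole hard register group; the
   ENDREGNO argument is derived from MODE the same way regno_clobbered_p
   below derives it.  */
static int
example_reg_inc_query (unsigned int regno, enum machine_mode mode, rtx insn)
{
  return reg_inc_found_and_valid_p (regno,
				    regno + hard_regno_nregs[regno][mode],
				    insn);
}
#endif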
7191 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7192 If SETS is 1, also consider SETs. If SETS is 2, also check for
7193 REG_INC notes. REGNO must refer to a hard register. */
7195 int
7196 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7197 int sets)
7199 unsigned int nregs, endregno;
7201 /* regno must be a hard register. */
7202 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7204 nregs = hard_regno_nregs[regno][mode];
7205 endregno = regno + nregs;
7207 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7208 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7209 && REG_P (XEXP (PATTERN (insn), 0)))
7211 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7213 return test >= regno && test < endregno;
7216 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7217 return 1;
7219 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7221 int i = XVECLEN (PATTERN (insn), 0) - 1;
7223 for (; i >= 0; i--)
7225 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7226 if ((GET_CODE (elt) == CLOBBER
7227 || (sets == 1 && GET_CODE (elt) == SET))
7228 && REG_P (XEXP (elt, 0)))
7230 unsigned int test = REGNO (XEXP (elt, 0));
7232 if (test >= regno && test < endregno)
7233 return 1;
7235 if (sets == 2
7236 && reg_inc_found_and_valid_p (regno, endregno, elt))
7237 return 1;
7241 return 0;
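/* A minimal calling sketch for regno_clobbered_p; the wrapper name is
   hypothetical.  SETS selects the strictness: 0 looks only at CLOBBERs,
   1 also counts SETs, and 2 instead adds auto-increments found via
   REG_INC notes.  */
static int
example_hard_reg_destroyed (unsigned int hard_regno, rtx insn,
			    enum machine_mode mode)
{
  /* Treat a SET, a CLOBBER, or an auto-increment of the register as
     destroying its previous contents.  */
  return (regno_clobbered_p (hard_regno, insn, mode, 1)
	  || regno_clobbered_p (hard_regno, insn, mode, 2));
}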
7244 /* Return the low part, with mode MODE, of the hard register RELOADREG. */
7245 rtx
7246 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7248 int regno;
7250 if (GET_MODE (reloadreg) == mode)
7251 return reloadreg;
7253 regno = REGNO (reloadreg);
7255 if (REG_WORDS_BIG_ENDIAN)
7256 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7257 - (int) hard_regno_nregs[regno][mode];
7259 return gen_rtx_REG (mode, regno);
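/* A minimal sketch of reload_adjust_reg_for_mode, assuming a 32-bit
   word target where a DImode value occupies the hard register pair
   2/3; the register numbers are purely illustrative.  */
static void
example_low_part_of_pair (void)
{
  rtx pair = gen_rtx_REG (DImode, 2);
  rtx low = reload_adjust_reg_for_mode (pair, SImode);

  /* With !REG_WORDS_BIG_ENDIAN the low SImode word lives in register 2;
     with REG_WORDS_BIG_ENDIAN the regno is bumped by
     hard_regno_nregs[2][DImode] - hard_regno_nregs[2][SImode] = 1,
     so the low word is register 3.  */
  gcc_assert (REGNO (low) == (unsigned) (REG_WORDS_BIG_ENDIAN ? 3 : 2));
}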
7262 static const char *const reload_when_needed_name[] =
7264 "RELOAD_FOR_INPUT",
7265 "RELOAD_FOR_OUTPUT",
7266 "RELOAD_FOR_INSN",
7267 "RELOAD_FOR_INPUT_ADDRESS",
7268 "RELOAD_FOR_INPADDR_ADDRESS",
7269 "RELOAD_FOR_OUTPUT_ADDRESS",
7270 "RELOAD_FOR_OUTADDR_ADDRESS",
7271 "RELOAD_FOR_OPERAND_ADDRESS",
7272 "RELOAD_FOR_OPADDR_ADDR",
7273 "RELOAD_OTHER",
7274 "RELOAD_FOR_OTHER_ADDRESS"
7277 /* These functions print the variables set by `find_reloads'. */
7279 DEBUG_FUNCTION void
7280 debug_reload_to_stream (FILE *f)
7282 int r;
7283 const char *prefix;
7285 if (! f)
7286 f = stderr;
7287 for (r = 0; r < n_reloads; r++)
7289 fprintf (f, "Reload %d: ", r);
7291 if (rld[r].in != 0)
7293 fprintf (f, "reload_in (%s) = ",
7294 GET_MODE_NAME (rld[r].inmode));
7295 print_inline_rtx (f, rld[r].in, 24);
7296 fprintf (f, "\n\t");
7299 if (rld[r].out != 0)
7301 fprintf (f, "reload_out (%s) = ",
7302 GET_MODE_NAME (rld[r].outmode));
7303 print_inline_rtx (f, rld[r].out, 24);
7304 fprintf (f, "\n\t");
7307 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7309 fprintf (f, "%s (opnum = %d)",
7310 reload_when_needed_name[(int) rld[r].when_needed],
7311 rld[r].opnum);
7313 if (rld[r].optional)
7314 fprintf (f, ", optional");
7316 if (rld[r].nongroup)
7317 fprintf (f, ", nongroup");
7319 if (rld[r].inc != 0)
7320 fprintf (f, ", inc by %d", rld[r].inc);
7322 if (rld[r].nocombine)
7323 fprintf (f, ", can't combine");
7325 if (rld[r].secondary_p)
7326 fprintf (f, ", secondary_reload_p");
7328 if (rld[r].in_reg != 0)
7330 fprintf (f, "\n\treload_in_reg: ");
7331 print_inline_rtx (f, rld[r].in_reg, 24);
7334 if (rld[r].out_reg != 0)
7336 fprintf (f, "\n\treload_out_reg: ");
7337 print_inline_rtx (f, rld[r].out_reg, 24);
7340 if (rld[r].reg_rtx != 0)
7342 fprintf (f, "\n\treload_reg_rtx: ");
7343 print_inline_rtx (f, rld[r].reg_rtx, 24);
7346 prefix = "\n\t";
7347 if (rld[r].secondary_in_reload != -1)
7349 fprintf (f, "%ssecondary_in_reload = %d",
7350 prefix, rld[r].secondary_in_reload);
7351 prefix = ", ";
7354 if (rld[r].secondary_out_reload != -1)
7355 fprintf (f, "%ssecondary_out_reload = %d\n",
7356 prefix, rld[r].secondary_out_reload);
7358 prefix = "\n\t";
7359 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7361 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7362 insn_data[rld[r].secondary_in_icode].name);
7363 prefix = ", ";
7366 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7367 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7368 insn_data[rld[r].secondary_out_icode].name);
7370 fprintf (f, "\n");
7374 DEBUG_FUNCTION void
7375 debug_reload (void)
7377 debug_reload_to_stream (stderr);
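/* A minimal sketch showing how the dump helpers above might be driven
   from other debugging code; the wrapper and the file name are
   hypothetical.  */
static void
example_dump_reloads_to_file (void)
{
  FILE *f = fopen ("/tmp/reloads.txt", "w");

  if (f)
    {
      /* Write every entry of rld[0 .. n_reloads - 1] in the same format
	 debug_reload prints on stderr.  */
      debug_reload_to_stream (f);
      fclose (f);
    }
}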