[official-gcc.git] / gcc / reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded; an illustrative sketch of this calling sequence follows this comment.
58 NOTE SIDE EFFECTS:
60 find_reloads can alter the operands of the instruction it is called on.
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
74 Using a reload register for several reloads in one insn:
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
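/* The calling sequence described above, as a hedged illustrative sketch;
   it is not part of the original file and the function name below is
   hypothetical.  It assumes only the reload.h interface visible to
   reload1.c: find_reloads, the rld[]/n_reloads tables and subst_reloads.
   The register-choosing step is merely stubbed out; in GCC proper it is
   done by choose_reload_regs in reload1.c.  */
#if 0
static void
reload_one_insn_sketch (rtx insn, short *spilled_regs)
{
  int i;

  /* Step 1: record the reloads this insn needs; a nonzero second
     argument also records the locations to substitute later.  */
  find_reloads (insn, 1, 0, 1, spilled_regs);

  /* Step 2: pick a hard register for every reload that find_reloads
     did not already resolve via rld[i].reg_rtx.  */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      ;  /* ... allocate a register of class rld[i].rclass ... */

  /* Step 3: substitute the chosen reload registers into the recorded
     locations.  */
  subst_reloads (insn);
}
#endif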
88 #define REG_OK_STRICT
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "function.h"
110 #include "params.h"
111 #include "target.h"
112 #include "ira.h"
114 /* True if X is a constant that can be forced into the constant pool.
115 MODE is the mode of the operand, or VOIDmode if not known. */
116 #define CONST_POOL_OK_P(MODE, X) \
117 ((MODE) != VOIDmode \
118 && CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (MODE, X))
122 /* True if RCLASS is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
125 static inline bool
126 small_register_class_p (reg_class_t rclass)
128 return (reg_class_size [(int) rclass] == 1
129 || (reg_class_size [(int) rclass] >= 1
130 && targetm.class_likely_spilled_p (rclass)));
134 /* All reloads of the current insn are recorded here. See reload.h for
135 comments. */
136 int n_reloads;
137 struct reload rld[MAX_RELOADS];
139 /* All the "earlyclobber" operands of the current insn
140 are recorded here. */
141 int n_earlyclobbers;
142 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
144 int reload_n_operands;
146 /* Replacing reloads.
148 If `replace_reloads' is nonzero, then as each reload is recorded
149 an entry is made for it in the table `replacements'.
150 Then later `subst_reloads' can look through that table and
151 perform all the replacements needed. */
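/* Illustrative example (not taken from the original sources): if reload
   number 3 is to replace the value at recog_data.operand_loc[1], the entry
   records that location in `where', 3 in `what' and the operand's mode in
   `mode'; subst_reloads later stores rld[3].reg_rtx, in that mode, through
   the recorded pointer.  */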
153 /* Nonzero means record the places to replace. */
154 static int replace_reloads;
156 /* Each replacement is recorded with a structure like this. */
157 struct replacement
159 rtx *where; /* Location to store in */
160 int what; /* which reload this is for */
161 enum machine_mode mode; /* mode it must have */
164 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
166 /* Number of replacements currently recorded. */
167 static int n_replacements;
169 /* Used to track what is modified by an operand. */
170 struct decomposition
172 int reg_flag; /* Nonzero if referencing a register. */
173 int safe; /* Nonzero if this can't conflict with anything. */
174 rtx base; /* Base address for MEM. */
175 HOST_WIDE_INT start; /* Starting offset or register number. */
176 HOST_WIDE_INT end; /* Ending offset or register number. */
179 #ifdef SECONDARY_MEMORY_NEEDED
181 /* Save MEMs needed to copy from one class of registers to another. One MEM
182 is used per mode, but normally only one or two modes are ever used.
184 We keep two versions, before and after register elimination. The one
185 after register elimination is recorded separately for each operand. This
186 is done in case the address is not valid, to be sure that we reload
187 each one separately. */
189 static rtx secondary_memlocs[NUM_MACHINE_MODES];
190 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
191 static int secondary_memlocs_elim_used = 0;
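/* Worked example (an illustrative assumption; see get_secondary_mem below):
   a first call such as get_secondary_mem (x, DImode, 2, RELOAD_FOR_OUTPUT)
   allocates a DImode slot, records its eliminated form in
   secondary_memlocs_elim[(int) DImode][2], and later calls with the same
   mode and operand number return that saved slot instead of making a new
   one.  */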
192 #endif
194 /* The instruction we are doing reloads for;
195 so we can test whether a register dies in it. */
196 static rtx this_insn;
198 /* Nonzero if this instruction is a user-specified asm with operands. */
199 static int this_insn_is_asm;
201 /* If hard_regs_live_known is nonzero,
202 we can tell which hard regs are currently live,
203 at least enough to succeed in choosing dummy reloads. */
204 static int hard_regs_live_known;
206 /* Indexed by hard reg number,
207 element is nonnegative if hard reg has been spilled.
208 This vector is passed to `find_reloads' as an argument
209 and is not changed here. */
210 static short *static_reload_reg_p;
212 /* Set to 1 in subst_reg_equivs if it changes anything. */
213 static int subst_reg_equivs_changed;
215 /* On return from push_reload, holds the reload-number for the OUT
216 operand, which can be different from that for the input operand. */
217 static int output_reloadnum;
219 /* Compare two RTX's. */
220 #define MATCHES(x, y) \
221 (x == y || (x != 0 && (REG_P (x) \
222 ? REG_P (y) && REGNO (x) == REGNO (y) \
223 : rtx_equal_p (x, y) && ! side_effects_p (x))))
225 /* Indicates if two reload purposes are for similar enough things that we
226 can merge their reloads. */
227 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
228 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
229 || ((when1) == (when2) && (op1) == (op2)) \
230 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
231 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
232 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
233 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
234 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
236 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
237 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
238 ((when1) != (when2) \
239 || ! ((op1) == (op2) \
240 || (when1) == RELOAD_FOR_INPUT \
241 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
242 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
244 /* If we are going to reload an address, compute the reload type to
245 use. */
246 #define ADDR_TYPE(type) \
247 ((type) == RELOAD_FOR_INPUT_ADDRESS \
248 ? RELOAD_FOR_INPADDR_ADDRESS \
249 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
250 ? RELOAD_FOR_OUTADDR_ADDRESS \
251 : (type)))
253 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
254 enum machine_mode, enum reload_type,
255 enum insn_code *, secondary_reload_info *);
256 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
257 int, unsigned int);
258 static void push_replacement (rtx *, int, enum machine_mode);
259 static void dup_replacements (rtx *, rtx *);
260 static void combine_reloads (void);
261 static int find_reusable_reload (rtx *, rtx, enum reg_class,
262 enum reload_type, int, int);
263 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
264 enum machine_mode, reg_class_t, int, int);
265 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
266 static struct decomposition decompose (rtx);
267 static int immune_p (rtx, rtx, struct decomposition);
268 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
269 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
270 int *);
271 static rtx make_memloc (rtx, int);
272 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
273 addr_space_t, rtx *);
274 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
275 int, enum reload_type, int, rtx);
276 static rtx subst_reg_equivs (rtx, rtx);
277 static rtx subst_indexed_address (rtx);
278 static void update_auto_inc_notes (rtx, int, int);
279 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
280 enum rtx_code, enum rtx_code, rtx *,
281 enum reload_type, int, rtx);
282 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
283 enum machine_mode, int,
284 enum reload_type, int);
285 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
286 int, rtx, int *);
287 static void copy_replacements_1 (rtx *, rtx *, int);
288 static int find_inc_amount (rtx, rtx);
289 static int refers_to_mem_for_reload_p (rtx);
290 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
291 rtx, rtx *);
293 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
294 list yet. */
296 static void
297 push_reg_equiv_alt_mem (int regno, rtx mem)
299 rtx it;
301 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
302 if (rtx_equal_p (XEXP (it, 0), mem))
303 return;
305 reg_equiv_alt_mem_list (regno)
306 = alloc_EXPR_LIST (REG_EQUIV, mem,
307 reg_equiv_alt_mem_list (regno));
310 /* Determine if any secondary reloads are needed for loading (if IN_P is
311 nonzero) or storing (if IN_P is zero) X to or from a reload register of
312 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
313 are needed, push them.
315 Return the reload number of the secondary reload we made, or -1 if
316 we didn't need one. *PICODE is set to the insn_code to use if we do
317 need a secondary reload. */
319 static int
320 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
321 enum reg_class reload_class,
322 enum machine_mode reload_mode, enum reload_type type,
323 enum insn_code *picode, secondary_reload_info *prev_sri)
325 enum reg_class rclass = NO_REGS;
326 enum reg_class scratch_class;
327 enum machine_mode mode = reload_mode;
328 enum insn_code icode = CODE_FOR_nothing;
329 enum insn_code t_icode = CODE_FOR_nothing;
330 enum reload_type secondary_type;
331 int s_reload, t_reload = -1;
332 const char *scratch_constraint;
333 char letter;
334 secondary_reload_info sri;
336 if (type == RELOAD_FOR_INPUT_ADDRESS
337 || type == RELOAD_FOR_OUTPUT_ADDRESS
338 || type == RELOAD_FOR_INPADDR_ADDRESS
339 || type == RELOAD_FOR_OUTADDR_ADDRESS)
340 secondary_type = type;
341 else
342 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
344 *picode = CODE_FOR_nothing;
346 /* If X is a paradoxical SUBREG, use the inner value to determine both the
347 mode and object being reloaded. */
348 if (paradoxical_subreg_p (x))
350 x = SUBREG_REG (x);
351 reload_mode = GET_MODE (x);
354 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
355 is still a pseudo-register by now, it *must* have an equivalent MEM
356 but we don't want to assume that), use that equivalent when seeing if
357 a secondary reload is needed since whether or not a reload is needed
358 might be sensitive to the form of the MEM. */
360 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
361 && reg_equiv_mem (REGNO (x)))
362 x = reg_equiv_mem (REGNO (x));
364 sri.icode = CODE_FOR_nothing;
365 sri.prev_sri = prev_sri;
366 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
367 reload_mode, &sri);
368 icode = (enum insn_code) sri.icode;
370 /* If we don't need any secondary registers, done. */
371 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
372 return -1;
374 if (rclass != NO_REGS)
375 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
376 reload_mode, type, &t_icode, &sri);
378 /* If we will be using an insn, the secondary reload is for a
379 scratch register. */
381 if (icode != CODE_FOR_nothing)
383 /* If IN_P is nonzero, the reload register will be the output in
384 operand 0. If IN_P is zero, the reload register will be the input
385 in operand 1. Outputs should have an initial "=", which we must
386 skip. */
388 /* ??? It would be useful to be able to handle only two, or more than
389 three, operands, but for now we can only handle the case of having
390 exactly three: output, input and one temp/scratch. */
391 gcc_assert (insn_data[(int) icode].n_operands == 3);
393 /* ??? We currently have no way to represent a reload that needs
394 an icode to reload from an intermediate tertiary reload register.
395 We should probably have a new field in struct reload to tag a
396 chain of scratch operand reloads onto. */
397 gcc_assert (rclass == NO_REGS);
399 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
400 gcc_assert (*scratch_constraint == '=');
401 scratch_constraint++;
402 if (*scratch_constraint == '&')
403 scratch_constraint++;
404 letter = *scratch_constraint;
405 scratch_class = (letter == 'r' ? GENERAL_REGS
406 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
407 scratch_constraint));
409 rclass = scratch_class;
410 mode = insn_data[(int) icode].operand[2].mode;
413 /* This case isn't valid, so fail. Reload is allowed to use the same
414 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
415 in the case of a secondary register, we actually need two different
416 registers for correct code. We fail here to prevent the possibility of
417 silently generating incorrect code later.
419 The convention is that secondary input reloads are valid only if the
420 secondary_class is different from class. If you have such a case, you
421 cannot use secondary reloads; you must work around the problem some
422 other way.
424 Allow this when a reload_in/out pattern is being used. I.e. assume
425 that the generated code handles this case. */
427 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
428 || t_icode != CODE_FOR_nothing);
430 /* See if we can reuse an existing secondary reload. */
431 for (s_reload = 0; s_reload < n_reloads; s_reload++)
432 if (rld[s_reload].secondary_p
433 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
434 || reg_class_subset_p (rld[s_reload].rclass, rclass))
435 && ((in_p && rld[s_reload].inmode == mode)
436 || (! in_p && rld[s_reload].outmode == mode))
437 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
438 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
439 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
440 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
441 && (small_register_class_p (rclass)
442 || targetm.small_register_classes_for_mode_p (VOIDmode))
443 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
444 opnum, rld[s_reload].opnum))
446 if (in_p)
447 rld[s_reload].inmode = mode;
448 if (! in_p)
449 rld[s_reload].outmode = mode;
451 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
452 rld[s_reload].rclass = rclass;
454 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
455 rld[s_reload].optional &= optional;
456 rld[s_reload].secondary_p = 1;
457 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
458 opnum, rld[s_reload].opnum))
459 rld[s_reload].when_needed = RELOAD_OTHER;
461 break;
464 if (s_reload == n_reloads)
466 #ifdef SECONDARY_MEMORY_NEEDED
467 /* If we need a memory location to copy between the two reload regs,
468 set it up now. Note that we do the input case before making
469 the reload and the output case after. This is due to the
470 way reloads are output. */
472 if (in_p && icode == CODE_FOR_nothing
473 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
475 get_secondary_mem (x, reload_mode, opnum, type);
477 /* We may have just added new reloads. Make sure we add
478 the new reload at the end. */
479 s_reload = n_reloads;
481 #endif
483 /* We need to make a new secondary reload for this register class. */
484 rld[s_reload].in = rld[s_reload].out = 0;
485 rld[s_reload].rclass = rclass;
487 rld[s_reload].inmode = in_p ? mode : VOIDmode;
488 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
489 rld[s_reload].reg_rtx = 0;
490 rld[s_reload].optional = optional;
491 rld[s_reload].inc = 0;
492 /* Maybe we could combine these, but it seems too tricky. */
493 rld[s_reload].nocombine = 1;
494 rld[s_reload].in_reg = 0;
495 rld[s_reload].out_reg = 0;
496 rld[s_reload].opnum = opnum;
497 rld[s_reload].when_needed = secondary_type;
498 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
499 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
500 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
501 rld[s_reload].secondary_out_icode
502 = ! in_p ? t_icode : CODE_FOR_nothing;
503 rld[s_reload].secondary_p = 1;
505 n_reloads++;
507 #ifdef SECONDARY_MEMORY_NEEDED
508 if (! in_p && icode == CODE_FOR_nothing
509 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
510 get_secondary_mem (x, mode, opnum, type);
511 #endif
514 *picode = icode;
515 return s_reload;
518 /* If a secondary reload is needed, return its class. If both an intermediate
519 register and a scratch register are needed, we return the class of the
520 intermediate register. */
521 reg_class_t
522 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
523 rtx x)
525 enum insn_code icode;
526 secondary_reload_info sri;
528 sri.icode = CODE_FOR_nothing;
529 sri.prev_sri = NULL;
530 rclass
531 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
532 icode = (enum insn_code) sri.icode;
534 /* If there are no secondary reloads at all, we return NO_REGS.
535 If an intermediate register is needed, we return its class. */
536 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
537 return rclass;
539 /* No intermediate register is needed, but we have a special reload
540 pattern, which we assume for now needs a scratch register. */
541 return scratch_reload_class (icode);
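/* Example (an illustrative assumption, not derived from any particular
   target): on a machine whose floating-point registers cannot be loaded
   directly from certain memory addresses, the target's secondary_reload
   hook might make secondary_reload_class (true, FP_REGS, DFmode, mem)
   return GENERAL_REGS, meaning the value must go through a general
   register first; NO_REGS means no intermediate register is needed.  */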
544 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
545 three operands, verify that operand 2 is an output operand, and return
546 its register class.
547 ??? We'd like to be able to handle any pattern with at least 2 operands,
548 for zero or more scratch registers, but that needs more infrastructure. */
549 enum reg_class
550 scratch_reload_class (enum insn_code icode)
552 const char *scratch_constraint;
553 char scratch_letter;
554 enum reg_class rclass;
556 gcc_assert (insn_data[(int) icode].n_operands == 3);
557 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
558 gcc_assert (*scratch_constraint == '=');
559 scratch_constraint++;
560 if (*scratch_constraint == '&')
561 scratch_constraint++;
562 scratch_letter = *scratch_constraint;
563 if (scratch_letter == 'r')
564 return GENERAL_REGS;
565 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
566 scratch_constraint);
567 gcc_assert (rclass != NO_REGS);
568 return rclass;
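/* Example (illustrative): if a target's reload pattern gives its scratch
   operand (operand 2) the constraint "=&r", the '=' and '&' are skipped
   above and the letter 'r' yields GENERAL_REGS; any other class letter is
   looked up with REG_CLASS_FROM_CONSTRAINT.  */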
571 #ifdef SECONDARY_MEMORY_NEEDED
573 /* Return a memory location that will be used to copy X in mode MODE.
574 If we haven't already made a location for this mode in this insn,
575 call find_reloads_address on the location being returned. */
577 rtx
578 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
579 int opnum, enum reload_type type)
581 rtx loc;
582 int mem_valid;
584 /* By default, if MODE is narrower than a word, widen it to a word.
585 This is required because most machines that require these memory
586 locations do not support short loads and stores from all registers
587 (e.g., FP registers). */
589 #ifdef SECONDARY_MEMORY_NEEDED_MODE
590 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
591 #else
592 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
593 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
594 #endif
596 /* If we already have made a MEM for this operand in MODE, return it. */
597 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
598 return secondary_memlocs_elim[(int) mode][opnum];
600 /* If this is the first time we've tried to get a MEM for this mode,
601 allocate a new one. `something_changed' in reload will get set
602 by noticing that the frame size has changed. */
604 if (secondary_memlocs[(int) mode] == 0)
606 #ifdef SECONDARY_MEMORY_NEEDED_RTX
607 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
608 #else
609 secondary_memlocs[(int) mode]
610 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
611 #endif
614 /* Get a version of the address doing any eliminations needed. If that
615 didn't give us a new MEM, make a new one if it isn't valid. */
617 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
618 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
619 MEM_ADDR_SPACE (loc));
621 if (! mem_valid && loc == secondary_memlocs[(int) mode])
622 loc = copy_rtx (loc);
624 /* The only time the call below will do anything is if the stack
625 offset is too large. In that case IND_LEVELS doesn't matter, so we
626 can just pass a zero. Adjust the type to be the address of the
627 corresponding object. If the address was valid, save the eliminated
628 address. If it wasn't valid, we need to make a reload each time, so
629 don't save it. */
631 if (! mem_valid)
633 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
634 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
635 : RELOAD_OTHER);
637 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
638 opnum, type, 0, 0);
641 secondary_memlocs_elim[(int) mode][opnum] = loc;
642 if (secondary_memlocs_elim_used <= (int)mode)
643 secondary_memlocs_elim_used = (int)mode + 1;
644 return loc;
647 /* Clear any secondary memory locations we've made. */
649 void
650 clear_secondary_mem (void)
652 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
654 #endif /* SECONDARY_MEMORY_NEEDED */
657 /* Find the largest class which has at least one register valid in
658 mode INNER, and which for every such register, that register number
659 plus N is also valid in OUTER (if in range) and is cheap to move
660 into REGNO. Such a class must exist. */
662 static enum reg_class
663 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
664 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
665 unsigned int dest_regno ATTRIBUTE_UNUSED)
667 int best_cost = -1;
668 int rclass;
669 int regno;
670 enum reg_class best_class = NO_REGS;
671 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
672 unsigned int best_size = 0;
673 int cost;
675 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
677 int bad = 0;
678 int good = 0;
679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
680 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
682 if (HARD_REGNO_MODE_OK (regno, inner))
684 good = 1;
685 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
686 || ! HARD_REGNO_MODE_OK (regno + n, outer))
687 bad = 1;
691 if (bad || !good)
692 continue;
693 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
695 if ((reg_class_size[rclass] > best_size
696 && (best_cost < 0 || best_cost >= cost))
697 || best_cost > cost)
699 best_class = (enum reg_class) rclass;
700 best_size = reg_class_size[rclass];
701 best_cost = register_move_cost (outer, (enum reg_class) rclass,
702 dest_class);
706 gcc_assert (best_size != 0);
708 return best_class;
711 /* Return the number of a previously made reload that can be combined with
712 a new one, or n_reloads if none of the existing reloads can be used.
713 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
714 push_reload; they determine the kind of the new reload that we try to
715 combine. P_IN points to the corresponding value of IN, which can be
716 modified by this function.
717 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
719 static int
720 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
721 enum reload_type type, int opnum, int dont_share)
723 rtx in = *p_in;
724 int i;
725 /* We can't merge two reloads if the output of either one is
726 earlyclobbered. */
728 if (earlyclobber_operand_p (out))
729 return n_reloads;
731 /* We can use an existing reload if the class is right
732 and at least one of IN and OUT is a match
733 and the other is at worst neutral.
734 (A zero compared against anything is neutral.)
736 For targets with small register classes, don't use existing reloads
737 unless they are for the same thing since that can cause us to need
738 more reload registers than we otherwise would. */
740 for (i = 0; i < n_reloads; i++)
741 if ((reg_class_subset_p (rclass, rld[i].rclass)
742 || reg_class_subset_p (rld[i].rclass, rclass))
743 /* If the existing reload has a register, it must fit our class. */
744 && (rld[i].reg_rtx == 0
745 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
746 true_regnum (rld[i].reg_rtx)))
747 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
748 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
749 || (out != 0 && MATCHES (rld[i].out, out)
750 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
751 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
752 && (small_register_class_p (rclass)
753 || targetm.small_register_classes_for_mode_p (VOIDmode))
754 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
755 return i;
757 /* Reloading a plain reg for input can match a reload to postincrement
758 that reg, since the postincrement's value is the right value.
759 Likewise, it can match a preincrement reload, since we regard
760 the preincrementation as happening before any ref in this insn
761 to that register. */
762 for (i = 0; i < n_reloads; i++)
763 if ((reg_class_subset_p (rclass, rld[i].rclass)
764 || reg_class_subset_p (rld[i].rclass, rclass))
765 /* If the existing reload has a register, it must fit our
766 class. */
767 && (rld[i].reg_rtx == 0
768 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
769 true_regnum (rld[i].reg_rtx)))
770 && out == 0 && rld[i].out == 0 && rld[i].in != 0
771 && ((REG_P (in)
772 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
773 && MATCHES (XEXP (rld[i].in, 0), in))
774 || (REG_P (rld[i].in)
775 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
776 && MATCHES (XEXP (in, 0), rld[i].in)))
777 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
778 && (small_register_class_p (rclass)
779 || targetm.small_register_classes_for_mode_p (VOIDmode))
780 && MERGABLE_RELOADS (type, rld[i].when_needed,
781 opnum, rld[i].opnum))
783 /* Make sure reload_in ultimately has the increment,
784 not the plain register. */
785 if (REG_P (in))
786 *p_in = rld[i].in;
787 return i;
789 return n_reloads;
792 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
793 expression. MODE is the mode that X will be used in. OUTPUT is true if
794 the function is invoked for the output part of an enclosing reload. */
796 static bool
797 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
799 rtx inner;
801 /* Only SUBREGs are problematical. */
802 if (GET_CODE (x) != SUBREG)
803 return false;
805 inner = SUBREG_REG (x);
807 /* If INNER is a constant or PLUS, then INNER will need reloading. */
808 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
809 return true;
811 /* If INNER is not a hard register, then INNER will not need reloading. */
812 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
813 return false;
815 /* If INNER is not ok for MODE, then INNER will need reloading. */
816 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
817 return true;
819 /* If this is for an output, and the outer part is a word or smaller,
820 INNER is larger than a word and the number of registers in INNER is
821 not the same as the number of words in INNER, then INNER will need
822 reloading (with an in-out reload). */
823 return (output
824 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
825 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
826 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
827 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
830 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
831 requiring an extra reload register. The caller has already found that
832 IN contains some reference to REGNO, so check that we can produce the
833 new value in a single step. E.g. if we have
834 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
835 instruction that adds one to a register, this should succeed.
836 However, if we have something like
837 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
838 needs to be loaded into a register first, we need a separate reload
839 register.
840 Such PLUS reloads are generated by find_reloads_address_part.
841 The out-of-range PLUS expressions are usually introduced in the instruction
842 patterns by register elimination and substituting pseudos without a home
843 by their function-invariant equivalences. */
844 static int
845 can_reload_into (rtx in, int regno, enum machine_mode mode)
847 rtx dst, test_insn;
848 int r = 0;
849 struct recog_data save_recog_data;
851 /* For matching constraints, we often get notional input reloads where
852 we want to use the original register as the reload register. I.e.
853 technically this is a non-optional input-output reload, but IN is
854 already a valid register, and has been chosen as the reload register.
855 Speed this up, since it trivially works. */
856 if (REG_P (in))
857 return 1;
859 /* To test MEMs properly, we'd have to take into account all the reloads
860 that are already scheduled, which can become quite complicated.
861 And since we've already handled address reloads for this MEM, it
862 should always succeed anyway. */
863 if (MEM_P (in))
864 return 1;
866 /* If we can make a simple SET insn that does the job, everything should
867 be fine. */
868 dst = gen_rtx_REG (mode, regno);
869 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
870 save_recog_data = recog_data;
871 if (recog_memoized (test_insn) >= 0)
873 extract_insn (test_insn);
874 r = constrain_operands (1);
876 recog_data = save_recog_data;
877 return r;
880 /* Record one reload that needs to be performed.
881 IN is an rtx saying where the data are to be found before this instruction.
882 OUT says where they must be stored after the instruction.
883 (IN is zero for data not read, and OUT is zero for data not written.)
884 INLOC and OUTLOC point to the places in the instructions where
885 IN and OUT were found.
886 If IN and OUT are both nonzero, it means the same register must be used
887 to reload both IN and OUT.
889 RCLASS is a register class required for the reloaded data.
890 INMODE is the machine mode that the instruction requires
891 for the reg that replaces IN and OUTMODE is likewise for OUT.
893 If IN is zero, then OUT's location and mode should be passed as
894 INLOC and INMODE.
896 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
898 OPTIONAL nonzero means this reload does not need to be performed:
899 it can be discarded if that is more convenient.
901 OPNUM and TYPE say what the purpose of this reload is.
903 The return value is the reload-number for this reload.
905 If both IN and OUT are nonzero, in some rare cases we might
906 want to make two separate reloads. (Actually we never do this now.)
907 Therefore, the reload-number for OUT is stored in
908 output_reloadnum when we return; the return value applies to IN.
909 Usually (presently always), when IN and OUT are nonzero,
910 the two reload-numbers are equal, but the caller should be careful to
911 distinguish them. */
913 int
914 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
915 enum reg_class rclass, enum machine_mode inmode,
916 enum machine_mode outmode, int strict_low, int optional,
917 int opnum, enum reload_type type)
919 int i;
920 int dont_share = 0;
921 int dont_remove_subreg = 0;
922 #ifdef LIMIT_RELOAD_CLASS
923 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
924 #endif
925 int secondary_in_reload = -1, secondary_out_reload = -1;
926 enum insn_code secondary_in_icode = CODE_FOR_nothing;
927 enum insn_code secondary_out_icode = CODE_FOR_nothing;
929 /* INMODE and/or OUTMODE could be VOIDmode if no mode
930 has been specified for the operand. In that case,
931 use the operand's mode as the mode to reload. */
932 if (inmode == VOIDmode && in != 0)
933 inmode = GET_MODE (in);
934 if (outmode == VOIDmode && out != 0)
935 outmode = GET_MODE (out);
937 /* If find_reloads and friends have until now failed to replace a pseudo
938 with its reg_equiv_constant, something went wrong
939 beforehand.
940 Note that it can't simply be done here if we missed it earlier
941 since the constant might need to be pushed into the literal pool
942 and the resulting memref would probably need further
943 reloading. */
944 if (in != 0 && REG_P (in))
946 int regno = REGNO (in);
948 gcc_assert (regno < FIRST_PSEUDO_REGISTER
949 || reg_renumber[regno] >= 0
950 || reg_equiv_constant (regno) == NULL_RTX);
953 /* reg_equiv_constant only contains constants which are obviously
954 not appropriate as a destination. So if we needed to replace
955 the destination pseudo with a constant, we would be in real
956 trouble. */
957 if (out != 0 && REG_P (out))
959 int regno = REGNO (out);
961 gcc_assert (regno < FIRST_PSEUDO_REGISTER
962 || reg_renumber[regno] >= 0
963 || reg_equiv_constant (regno) == NULL_RTX);
966 /* If we have a read-write operand with an address side-effect,
967 change either IN or OUT so the side-effect happens only once. */
968 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
969 switch (GET_CODE (XEXP (in, 0)))
971 case POST_INC: case POST_DEC: case POST_MODIFY:
972 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
973 break;
975 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
976 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
977 break;
979 default:
980 break;
983 /* If we are reloading a (SUBREG constant ...), really reload just the
984 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
985 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
986 a pseudo and hence will become a MEM) with M1 wider than M2 and the
987 register is a pseudo, also reload the inside expression.
988 For machines that extend byte loads, do this for any SUBREG of a pseudo
989 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
990 M2 is an integral mode that gets extended when loaded.
991 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
992 where either M1 is not valid for R or M2 is wider than a word but we
993 only need one register to store an M2-sized quantity in R.
994 (However, if OUT is nonzero, we need to reload the reg *and*
995 the subreg, so do nothing here, and let following statement handle it.)
997 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
998 we can't handle it here because CONST_INT does not indicate a mode.
1000 Similarly, we must reload the inside expression if we have a
1001 STRICT_LOW_PART (presumably, in == out in this case).
1003 Also reload the inner expression if it does not require a secondary
1004 reload but the SUBREG does.
1006 Finally, reload the inner expression if it is a register that is in
1007 the class whose registers cannot be referenced in a different size
1008 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1009 cannot reload just the inside since we might end up with the wrong
1010 register class. But if it is inside a STRICT_LOW_PART, we have
1011 no choice, so we hope we do get the right register class there. */
1013 if (in != 0 && GET_CODE (in) == SUBREG
1014 && (subreg_lowpart_p (in) || strict_low)
1015 #ifdef CANNOT_CHANGE_MODE_CLASS
1016 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1017 #endif
1018 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1019 && (CONSTANT_P (SUBREG_REG (in))
1020 || GET_CODE (SUBREG_REG (in)) == PLUS
1021 || strict_low
1022 || (((REG_P (SUBREG_REG (in))
1023 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1024 || MEM_P (SUBREG_REG (in)))
1025 && ((GET_MODE_PRECISION (inmode)
1026 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1027 #ifdef LOAD_EXTEND_OP
1028 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1029 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1030 <= UNITS_PER_WORD)
1031 && (GET_MODE_PRECISION (inmode)
1032 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1033 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1034 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1035 #endif
1036 #ifdef WORD_REGISTER_OPERATIONS
1037 || ((GET_MODE_PRECISION (inmode)
1038 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1039 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1040 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1041 / UNITS_PER_WORD)))
1042 #endif
1044 || (REG_P (SUBREG_REG (in))
1045 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1046 /* The case where out is nonzero
1047 is handled differently in the following statement. */
1048 && (out == 0 || subreg_lowpart_p (in))
1049 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1050 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1051 > UNITS_PER_WORD)
1052 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1053 / UNITS_PER_WORD)
1054 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1055 [GET_MODE (SUBREG_REG (in))]))
1056 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1057 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1058 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1059 SUBREG_REG (in))
1060 == NO_REGS))
1061 #ifdef CANNOT_CHANGE_MODE_CLASS
1062 || (REG_P (SUBREG_REG (in))
1063 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1064 && REG_CANNOT_CHANGE_MODE_P
1065 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1066 #endif
1069 #ifdef LIMIT_RELOAD_CLASS
1070 in_subreg_loc = inloc;
1071 #endif
1072 inloc = &SUBREG_REG (in);
1073 in = *inloc;
1074 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1075 if (MEM_P (in))
1076 /* This is supposed to happen only for paradoxical subregs made by
1077 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1078 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1079 #endif
1080 inmode = GET_MODE (in);
1083 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1084 where M1 is not valid for R if it was not handled by the code above.
1086 Similar issue for (SUBREG constant ...) if it was not handled by the
1087 code above. This can happen if SUBREG_BYTE != 0.
1089 However, we must reload the inner reg *as well as* the subreg in
1090 that case. */
1092 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1094 enum reg_class in_class = rclass;
1096 if (REG_P (SUBREG_REG (in)))
1097 in_class
1098 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1099 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1100 GET_MODE (SUBREG_REG (in)),
1101 SUBREG_BYTE (in),
1102 GET_MODE (in)),
1103 REGNO (SUBREG_REG (in)));
1105 /* This relies on the fact that emit_reload_insns outputs the
1106 instructions for input reloads of type RELOAD_OTHER in the same
1107 order as the reloads. Thus if the outer reload is also of type
1108 RELOAD_OTHER, we are guaranteed that this inner reload will be
1109 output before the outer reload. */
1110 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1111 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1112 dont_remove_subreg = 1;
1115 /* Similarly for paradoxical and problematical SUBREGs on the output.
1116 Note that there is no reason we need to worry about the previous value
1117 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1118 entitled to clobber it all (except in the case of a word mode subreg
1119 or of a STRICT_LOW_PART; in that latter case the constraint should
1120 label it input-output.) */
1121 if (out != 0 && GET_CODE (out) == SUBREG
1122 && (subreg_lowpart_p (out) || strict_low)
1123 #ifdef CANNOT_CHANGE_MODE_CLASS
1124 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1125 #endif
1126 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1127 && (CONSTANT_P (SUBREG_REG (out))
1128 || strict_low
1129 || (((REG_P (SUBREG_REG (out))
1130 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1131 || MEM_P (SUBREG_REG (out)))
1132 && ((GET_MODE_PRECISION (outmode)
1133 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1134 #ifdef WORD_REGISTER_OPERATIONS
1135 || ((GET_MODE_PRECISION (outmode)
1136 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1137 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1138 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1139 / UNITS_PER_WORD)))
1140 #endif
1142 || (REG_P (SUBREG_REG (out))
1143 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1144 /* The case of a word mode subreg
1145 is handled differently in the following statement. */
1146 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1147 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1148 > UNITS_PER_WORD))
1149 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1150 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1151 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1152 SUBREG_REG (out))
1153 == NO_REGS))
1154 #ifdef CANNOT_CHANGE_MODE_CLASS
1155 || (REG_P (SUBREG_REG (out))
1156 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1157 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1158 GET_MODE (SUBREG_REG (out)),
1159 outmode))
1160 #endif
1163 #ifdef LIMIT_RELOAD_CLASS
1164 out_subreg_loc = outloc;
1165 #endif
1166 outloc = &SUBREG_REG (out);
1167 out = *outloc;
1168 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1169 gcc_assert (!MEM_P (out)
1170 || GET_MODE_SIZE (GET_MODE (out))
1171 <= GET_MODE_SIZE (outmode));
1172 #endif
1173 outmode = GET_MODE (out);
1176 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1177 where either M1 is not valid for R or M2 is wider than a word but we
1178 only need one register to store an M2-sized quantity in R.
1180 However, we must reload the inner reg *as well as* the subreg in
1181 that case and the inner reg is an in-out reload. */
1183 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1185 enum reg_class in_out_class
1186 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1187 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1188 GET_MODE (SUBREG_REG (out)),
1189 SUBREG_BYTE (out),
1190 GET_MODE (out)),
1191 REGNO (SUBREG_REG (out)));
1193 /* This relies on the fact that emit_reload_insns outputs the
1194 instructions for output reloads of type RELOAD_OTHER in reverse
1195 order of the reloads. Thus if the outer reload is also of type
1196 RELOAD_OTHER, we are guaranteed that this inner reload will be
1197 output after the outer reload. */
1198 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1199 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1200 0, 0, opnum, RELOAD_OTHER);
1201 dont_remove_subreg = 1;
1204 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1205 if (in != 0 && out != 0 && MEM_P (out)
1206 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1207 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1208 dont_share = 1;
1210 /* If IN is a SUBREG of a hard register, make a new REG. This
1211 simplifies some of the cases below. */
1213 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1214 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1215 && ! dont_remove_subreg)
1216 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1218 /* Similarly for OUT. */
1219 if (out != 0 && GET_CODE (out) == SUBREG
1220 && REG_P (SUBREG_REG (out))
1221 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1222 && ! dont_remove_subreg)
1223 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1225 /* Narrow down the class of register wanted if that is
1226 desirable on this machine for efficiency. */
1228 reg_class_t preferred_class = rclass;
1230 if (in != 0)
1231 preferred_class = targetm.preferred_reload_class (in, rclass);
1233 /* Output reloads may need analogous treatment, different in detail. */
1234 if (out != 0)
1235 preferred_class
1236 = targetm.preferred_output_reload_class (out, preferred_class);
1238 /* Discard what the target said if we cannot do it. */
1239 if (preferred_class != NO_REGS
1240 || (optional && type == RELOAD_FOR_OUTPUT))
1241 rclass = (enum reg_class) preferred_class;
1244 /* Make sure we use a class that can handle the actual pseudo
1245 inside any subreg. For example, on the 386, QImode regs
1246 can appear within SImode subregs. Although GENERAL_REGS
1247 can handle SImode, QImode needs a smaller class. */
1248 #ifdef LIMIT_RELOAD_CLASS
1249 if (in_subreg_loc)
1250 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1251 else if (in != 0 && GET_CODE (in) == SUBREG)
1252 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1254 if (out_subreg_loc)
1255 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1256 if (out != 0 && GET_CODE (out) == SUBREG)
1257 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1258 #endif
1260 /* Verify that this class is at least possible for the mode that
1261 is specified. */
1262 if (this_insn_is_asm)
1264 enum machine_mode mode;
1265 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1266 mode = inmode;
1267 else
1268 mode = outmode;
1269 if (mode == VOIDmode)
1271 error_for_asm (this_insn, "cannot reload integer constant "
1272 "operand in %<asm%>");
1273 mode = word_mode;
1274 if (in != 0)
1275 inmode = word_mode;
1276 if (out != 0)
1277 outmode = word_mode;
1279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1280 if (HARD_REGNO_MODE_OK (i, mode)
1281 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1282 break;
1283 if (i == FIRST_PSEUDO_REGISTER)
1285 error_for_asm (this_insn, "impossible register constraint "
1286 "in %<asm%>");
1287 /* Avoid further trouble with this insn. */
1288 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1289 /* We used to continue here setting class to ALL_REGS, but it triggers
1290 a sanity check on i386 for:
1291 void foo(long double d)
1293 asm("" :: "a" (d));
1295 Returning zero here ought to be safe as we take care in
1296 find_reloads to not process the reloads when the instruction was
1297 replaced by USE. */
1299 return 0;
1303 /* Optional output reloads are always OK even if we have no register class,
1304 since the function of these reloads is only to have spill_reg_store etc.
1305 set, so that the storing insn can be deleted later. */
1306 gcc_assert (rclass != NO_REGS
1307 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1309 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1311 if (i == n_reloads)
1313 /* See if we need a secondary reload register to move between CLASS
1314 and IN or CLASS and OUT. Get the icode and push any required reloads
1315 needed for each of them if so. */
1317 if (in != 0)
1318 secondary_in_reload
1319 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1320 &secondary_in_icode, NULL);
1321 if (out != 0 && GET_CODE (out) != SCRATCH)
1322 secondary_out_reload
1323 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1324 type, &secondary_out_icode, NULL);
1326 /* We found no existing reload suitable for re-use.
1327 So add an additional reload. */
1329 #ifdef SECONDARY_MEMORY_NEEDED
1330 /* If a memory location is needed for the copy, make one. */
1331 if (in != 0
1332 && (REG_P (in)
1333 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1334 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1335 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1336 rclass, inmode))
1337 get_secondary_mem (in, inmode, opnum, type);
1338 #endif
1340 i = n_reloads;
1341 rld[i].in = in;
1342 rld[i].out = out;
1343 rld[i].rclass = rclass;
1344 rld[i].inmode = inmode;
1345 rld[i].outmode = outmode;
1346 rld[i].reg_rtx = 0;
1347 rld[i].optional = optional;
1348 rld[i].inc = 0;
1349 rld[i].nocombine = 0;
1350 rld[i].in_reg = inloc ? *inloc : 0;
1351 rld[i].out_reg = outloc ? *outloc : 0;
1352 rld[i].opnum = opnum;
1353 rld[i].when_needed = type;
1354 rld[i].secondary_in_reload = secondary_in_reload;
1355 rld[i].secondary_out_reload = secondary_out_reload;
1356 rld[i].secondary_in_icode = secondary_in_icode;
1357 rld[i].secondary_out_icode = secondary_out_icode;
1358 rld[i].secondary_p = 0;
1360 n_reloads++;
1362 #ifdef SECONDARY_MEMORY_NEEDED
1363 if (out != 0
1364 && (REG_P (out)
1365 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1366 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1367 && SECONDARY_MEMORY_NEEDED (rclass,
1368 REGNO_REG_CLASS (reg_or_subregno (out)),
1369 outmode))
1370 get_secondary_mem (out, outmode, opnum, type);
1371 #endif
1373 else
1375 /* We are reusing an existing reload,
1376 but we may have additional information for it.
1377 For example, we may now have both IN and OUT
1378 while the old one may have just one of them. */
1380 /* The modes can be different. If they are, we want to reload in
1381 the larger mode, so that the value is valid for both modes. */
1382 if (inmode != VOIDmode
1383 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1384 rld[i].inmode = inmode;
1385 if (outmode != VOIDmode
1386 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1387 rld[i].outmode = outmode;
1388 if (in != 0)
1390 rtx in_reg = inloc ? *inloc : 0;
1391 /* If we merge reloads for two distinct rtl expressions that
1392 are identical in content, there might be duplicate address
1393 reloads. Remove the extra set now, so that if we later find
1394 that we can inherit this reload, we can get rid of the
1395 address reloads altogether.
1397 Do not do this if both reloads are optional since the result
1398 would be an optional reload which could potentially leave
1399 unresolved address replacements.
1401 It is not sufficient to call transfer_replacements since
1402 choose_reload_regs will remove the replacements for address
1403 reloads of inherited reloads which results in the same
1404 problem. */
1405 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1406 && ! (rld[i].optional && optional))
1408 /* We must keep the address reload with the lower operand
1409 number alive. */
1410 if (opnum > rld[i].opnum)
1412 remove_address_replacements (in);
1413 in = rld[i].in;
1414 in_reg = rld[i].in_reg;
1416 else
1417 remove_address_replacements (rld[i].in);
1419 /* When emitting reloads we don't necessarily look at the in-
1420 and outmode, but also directly at the operands (in and out).
1421 So we can't simply overwrite them with whatever we have found
1422 for this (to-be-merged) reload, we have to "merge" that too.
1423 Reusing another reload already verified that we deal with the
1424 same operands, just possibly in different modes. So we
1425 overwrite the operands only when the new mode is larger.
1426 See also PR33613. */
1427 if (!rld[i].in
1428 || GET_MODE_SIZE (GET_MODE (in))
1429 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1430 rld[i].in = in;
1431 if (!rld[i].in_reg
1432 || (in_reg
1433 && GET_MODE_SIZE (GET_MODE (in_reg))
1434 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1435 rld[i].in_reg = in_reg;
1437 if (out != 0)
1439 if (!rld[i].out
1440 || (out
1441 && GET_MODE_SIZE (GET_MODE (out))
1442 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1443 rld[i].out = out;
1444 if (outloc
1445 && (!rld[i].out_reg
1446 || GET_MODE_SIZE (GET_MODE (*outloc))
1447 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1448 rld[i].out_reg = *outloc;
1450 if (reg_class_subset_p (rclass, rld[i].rclass))
1451 rld[i].rclass = rclass;
1452 rld[i].optional &= optional;
1453 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1454 opnum, rld[i].opnum))
1455 rld[i].when_needed = RELOAD_OTHER;
1456 rld[i].opnum = MIN (rld[i].opnum, opnum);
1459 /* If the ostensible rtx being reloaded differs from the rtx found
1460 in the location to substitute, this reload is not safe to combine
1461 because we cannot reliably tell whether it appears in the insn. */
1463 if (in != 0 && in != *inloc)
1464 rld[i].nocombine = 1;
1466 #if 0
1467 /* This was replaced by changes in find_reloads_address_1 and the new
1468 function inc_for_reload, which go with a new meaning of reload_inc. */
1470 /* If this is an IN/OUT reload in an insn that sets the CC,
1471 it must be for an autoincrement. It doesn't work to store
1472 the incremented value after the insn because that would clobber the CC.
1473 So we must do the increment of the value reloaded from,
1474 increment it, store it back, then decrement again. */
1475 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1477 out = 0;
1478 rld[i].out = 0;
1479 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1480 /* If we did not find a nonzero amount-to-increment-by,
1481 that contradicts the belief that IN is being incremented
1482 in an address in this insn. */
1483 gcc_assert (rld[i].inc != 0);
1485 #endif
1487 /* If we will replace IN and OUT with the reload-reg,
1488 record where they are located so that substitution need
1489 not do a tree walk. */
1491 if (replace_reloads)
1493 if (inloc != 0)
1495 struct replacement *r = &replacements[n_replacements++];
1496 r->what = i;
1497 r->where = inloc;
1498 r->mode = inmode;
1500 if (outloc != 0 && outloc != inloc)
1502 struct replacement *r = &replacements[n_replacements++];
1503 r->what = i;
1504 r->where = outloc;
1505 r->mode = outmode;
1509 /* If this reload is just being introduced and it has both
1510 an incoming quantity and an outgoing quantity that are
1511 supposed to be made to match, see if either one of the two
1512 can serve as the place to reload into.
1514 If one of them is acceptable, set rld[i].reg_rtx
1515 to that one. */
1517 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1519 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1520 inmode, outmode,
1521 rld[i].rclass, i,
1522 earlyclobber_operand_p (out));
1524 /* If the outgoing register already contains the same value
1525 as the incoming one, we can dispense with loading it.
1526 The easiest way to tell the caller that is to give a phony
1527 value for the incoming operand (same as outgoing one). */
1528 if (rld[i].reg_rtx == out
1529 && (REG_P (in) || CONSTANT_P (in))
1530 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1531 static_reload_reg_p, i, inmode))
1532 rld[i].in = out;
1535 /* If this is an input reload and the operand contains a register that
1536 dies in this insn and is used nowhere else, see if it is the right class
1537 to be used for this reload. Use it if so. (This occurs most commonly
1538 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1539 this if it is also an output reload that mentions the register unless
1540 the output is a SUBREG that clobbers an entire register.
1542 Note that the operand might be one of the spill regs, if it is a
1543 pseudo reg and we are in a block where spilling has not taken place.
1544 But if there is no spilling in this block, that is OK.
1545 An explicitly used hard reg cannot be a spill reg. */
1547 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1549 rtx note;
1550 int regno;
1551 enum machine_mode rel_mode = inmode;
1553 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1554 rel_mode = outmode;
1556 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1557 if (REG_NOTE_KIND (note) == REG_DEAD
1558 && REG_P (XEXP (note, 0))
1559 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1560 && reg_mentioned_p (XEXP (note, 0), in)
1561 /* Check that a former pseudo is valid; see find_dummy_reload. */
1562 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1563 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1564 ORIGINAL_REGNO (XEXP (note, 0)))
1565 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1566 && ! refers_to_regno_for_reload_p (regno,
1567 end_hard_regno (rel_mode,
1568 regno),
1569 PATTERN (this_insn), inloc)
1570 /* If this is also an output reload, IN cannot be used as
1571 the reload register if it is set in this insn unless IN
1572 is also OUT. */
1573 && (out == 0 || in == out
1574 || ! hard_reg_set_here_p (regno,
1575 end_hard_regno (rel_mode, regno),
1576 PATTERN (this_insn)))
1577 /* ??? Why is this code so different from the previous?
1578 Is there any simple coherent way to describe the two together?
1579 What's going on here? */
1580 && (in != out
1581 || (GET_CODE (in) == SUBREG
1582 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1583 / UNITS_PER_WORD)
1584 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1585 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1586 /* Make sure the operand fits in the reg that dies. */
1587 && (GET_MODE_SIZE (rel_mode)
1588 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1589 && HARD_REGNO_MODE_OK (regno, inmode)
1590 && HARD_REGNO_MODE_OK (regno, outmode))
1592 unsigned int offs;
1593 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1594 hard_regno_nregs[regno][outmode]);
1596 for (offs = 0; offs < nregs; offs++)
1597 if (fixed_regs[regno + offs]
1598 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1599 regno + offs))
1600 break;
1602 if (offs == nregs
1603 && (! (refers_to_regno_for_reload_p
1604 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1605 || can_reload_into (in, regno, inmode)))
1607 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1608 break;
1613 if (out)
1614 output_reloadnum = i;
1616 return i;
1619 /* Record an additional place we must replace a value
1620 for which we have already recorded a reload.
1621 RELOADNUM is the value returned by push_reload
1622 when the reload was recorded.
1623 This is used in insn patterns that use match_dup. */
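   /* Hypothetical illustration (not a pattern from any real target): in
        (set (match_operand:SI 0 "register_operand" "=r")
             (plus:SI (match_dup 0)
                      (match_operand:SI 1 "register_operand" "r")))
      operand 0 appears twice.  The reload pushed for the first location
      is recorded again for the match_dup location, so subst_reloads will
      later patch both copies consistently.  */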
1625 static void
1626 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1628 if (replace_reloads)
1630 struct replacement *r = &replacements[n_replacements++];
1631 r->what = reloadnum;
1632 r->where = loc;
1633 r->mode = mode;
1637 /* Duplicate any replacement we have recorded to apply at
1638 location ORIG_LOC to also be performed at DUP_LOC.
1639 This is used in insn patterns that use match_dup. */
1641 static void
1642 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1644 int i, n = n_replacements;
1646 for (i = 0; i < n; i++)
1648 struct replacement *r = &replacements[i];
1649 if (r->where == orig_loc)
1650 push_replacement (dup_loc, r->what, r->mode);
1654 /* Transfer all replacements that used to be in reload FROM to be in
1655 reload TO. */
1657 void
1658 transfer_replacements (int to, int from)
1660 int i;
1662 for (i = 0; i < n_replacements; i++)
1663 if (replacements[i].what == from)
1664 replacements[i].what = to;
1667 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1668 or a subpart of it. If we have any replacements registered for IN_RTX,
1669 cancel the reloads that were supposed to load them.
1670 Return nonzero if we canceled any reloads. */
1671 int
1672 remove_address_replacements (rtx in_rtx)
1674 int i, j;
1675 char reload_flags[MAX_RELOADS];
1676 int something_changed = 0;
1678 memset (reload_flags, 0, sizeof reload_flags);
1679 for (i = 0, j = 0; i < n_replacements; i++)
1681 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1682 reload_flags[replacements[i].what] |= 1;
1683 else
1685 replacements[j++] = replacements[i];
1686 reload_flags[replacements[i].what] |= 2;
1689 /* Note that the following store must be done before the recursive calls. */
1690 n_replacements = j;
1692 for (i = n_reloads - 1; i >= 0; i--)
1694 if (reload_flags[i] == 1)
1696 deallocate_reload_reg (i);
1697 remove_address_replacements (rld[i].in);
1698 rld[i].in = 0;
1699 something_changed = 1;
1702 return something_changed;
1705 /* If there is only one output reload, and it is not for an earlyclobber
1706 operand, try to combine it with a (logically unrelated) input reload
1707 to reduce the number of reload registers needed.
1709 This is safe if the input reload does not appear in
1710 the value being output-reloaded, because this implies
1711 it is not needed any more once the original insn completes.
1713 If that doesn't work, see if we can use any of the registers that
1714 die in this insn as a reload register. We can if it is of the right
1715 class and does not appear in the value being output-reloaded. */
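   /* Rough illustration, not from the original sources: if an insn has an
      input reload for operand 1 and an output reload for operand 0, and
      operand 1 does not occur anywhere inside operand 0, the same reload
      register can carry the reloaded input into the insn and then carry
      the insn's result back out to operand 0, so one spill register
      suffices where two would otherwise be needed.  */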
1717 static void
1718 combine_reloads (void)
1720 int i, regno;
1721 int output_reload = -1;
1722 int secondary_out = -1;
1723 rtx note;
1725 /* Find the output reload; return unless there is exactly one
1726 and that one is mandatory. */
1728 for (i = 0; i < n_reloads; i++)
1729 if (rld[i].out != 0)
1731 if (output_reload >= 0)
1732 return;
1733 output_reload = i;
1736 if (output_reload < 0 || rld[output_reload].optional)
1737 return;
1739 /* An input-output reload isn't combinable. */
1741 if (rld[output_reload].in != 0)
1742 return;
1744 /* If this reload is for an earlyclobber operand, we can't do anything. */
1745 if (earlyclobber_operand_p (rld[output_reload].out))
1746 return;
1748 /* If there is a reload for part of the address of this operand, we would
1749 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1750 its life to the point where doing this combine would not lower the
1751 number of spill registers needed. */
1752 for (i = 0; i < n_reloads; i++)
1753 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1754 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1755 && rld[i].opnum == rld[output_reload].opnum)
1756 return;
1758 /* Check each input reload; can we combine it? */
1760 for (i = 0; i < n_reloads; i++)
1761 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1762 /* Life span of this reload must not extend past main insn. */
1763 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1764 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1765 && rld[i].when_needed != RELOAD_OTHER
1766 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1767 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1768 [(int) rld[output_reload].outmode])
1769 && rld[i].inc == 0
1770 && rld[i].reg_rtx == 0
1771 #ifdef SECONDARY_MEMORY_NEEDED
1772 /* Don't combine two reloads with different secondary
1773 memory locations. */
1774 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1775 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1776 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1777 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1778 #endif
1779 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1780 ? (rld[i].rclass == rld[output_reload].rclass)
1781 : (reg_class_subset_p (rld[i].rclass,
1782 rld[output_reload].rclass)
1783 || reg_class_subset_p (rld[output_reload].rclass,
1784 rld[i].rclass)))
1785 && (MATCHES (rld[i].in, rld[output_reload].out)
1786 /* Args reversed because the first arg seems to be
1787 the one that we imagine being modified
1788 while the second is the one that might be affected. */
1789 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1790 rld[i].in)
1791 /* However, if the input is a register that appears inside
1792 the output, then we also can't share.
1793 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1794 If the same reload reg is used for both reg 69 and the
1795 result to be stored in memory, then that result
1796 will clobber the address of the memory ref. */
1797 && ! (REG_P (rld[i].in)
1798 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1799 rld[output_reload].out))))
1800 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1801 rld[i].when_needed != RELOAD_FOR_INPUT)
1802 && (reg_class_size[(int) rld[i].rclass]
1803 || targetm.small_register_classes_for_mode_p (VOIDmode))
1804 /* We will allow making things slightly worse by combining an
1805 input and an output, but no worse than that. */
1806 && (rld[i].when_needed == RELOAD_FOR_INPUT
1807 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1809 int j;
1811 /* We have found a reload to combine with! */
1812 rld[i].out = rld[output_reload].out;
1813 rld[i].out_reg = rld[output_reload].out_reg;
1814 rld[i].outmode = rld[output_reload].outmode;
1815 /* Mark the old output reload as inoperative. */
1816 rld[output_reload].out = 0;
1817 /* The combined reload is needed for the entire insn. */
1818 rld[i].when_needed = RELOAD_OTHER;
1819 /* If the output reload had a secondary reload, copy it. */
1820 if (rld[output_reload].secondary_out_reload != -1)
1822 rld[i].secondary_out_reload
1823 = rld[output_reload].secondary_out_reload;
1824 rld[i].secondary_out_icode
1825 = rld[output_reload].secondary_out_icode;
1828 #ifdef SECONDARY_MEMORY_NEEDED
1829 /* Copy any secondary MEM. */
1830 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1831 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1832 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1833 #endif
1834 /* If required, minimize the register class. */
1835 if (reg_class_subset_p (rld[output_reload].rclass,
1836 rld[i].rclass))
1837 rld[i].rclass = rld[output_reload].rclass;
1839 /* Transfer all replacements from the old reload to the combined. */
1840 for (j = 0; j < n_replacements; j++)
1841 if (replacements[j].what == output_reload)
1842 replacements[j].what = i;
1844 return;
1847 /* If this insn has only one operand that is modified or written (assumed
1848 to be the first), it must be the one corresponding to this reload. It
1849 is safe to use anything that dies in this insn for that output provided
1850 that it does not occur in the output (we already know it isn't an
1851 earlyclobber).  If this is an asm insn, give up. */
1853 if (INSN_CODE (this_insn) == -1)
1854 return;
1856 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1857 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1858 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1859 return;
1861 /* See if some hard register that dies in this insn and is not used in
1862 the output is the right class. Only works if the register we pick
1863 up can fully hold our output reload. */
1864 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1865 if (REG_NOTE_KIND (note) == REG_DEAD
1866 && REG_P (XEXP (note, 0))
1867 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1868 rld[output_reload].out)
1869 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1870 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1871 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1872 regno)
1873 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1874 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1875 /* Ensure that a secondary or tertiary reload for this output
1876 won't want this register. */
1877 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1878 || (!(TEST_HARD_REG_BIT
1879 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1880 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1881 || !(TEST_HARD_REG_BIT
1882 (reg_class_contents[(int) rld[secondary_out].rclass],
1883 regno)))))
1884 && !fixed_regs[regno]
1885 /* Check that a former pseudo is valid; see find_dummy_reload. */
1886 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1887 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1888 ORIGINAL_REGNO (XEXP (note, 0)))
1889 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1891 rld[output_reload].reg_rtx
1892 = gen_rtx_REG (rld[output_reload].outmode, regno);
1893 return;
1897 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1898 See if one of IN and OUT is a register that may be used;
1899 this is desirable since a spill-register won't be needed.
1900 If so, return the register rtx that proves acceptable.
1902 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1903 RCLASS is the register class required for the reload.
1905 If FOR_REAL is >= 0, it is the number of the reload,
1906 and in some cases when it can be discovered that OUT doesn't need
1907 to be computed, clear out rld[FOR_REAL].out.
1909 If FOR_REAL is -1, this should not be done, because this call
1910 is just to see if a register can be found, not to find and install it.
1912 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1913 puts an additional constraint on being able to use IN for OUT since
1914 IN must not appear elsewhere in the insn (it is assumed that IN itself
1915 is safe from the earlyclobber). */
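   /* Hypothetical example for illustration: for an in-out reload whose IN
      is a pseudo that ended up in memory and whose OUT is the hard register
      (reg:SI 3), OUT itself can serve as the reload register provided hard
      reg 3 belongs to RCLASS, is valid in OUTMODE, and is not referenced
      elsewhere in the insn; no separate spill register is then needed.  */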
1917 static rtx
1918 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1919 enum machine_mode inmode, enum machine_mode outmode,
1920 reg_class_t rclass, int for_real, int earlyclobber)
1922 rtx in = real_in;
1923 rtx out = real_out;
1924 int in_offset = 0;
1925 int out_offset = 0;
1926 rtx value = 0;
1928 /* If operands exceed a word, we can't use either of them
1929 unless they have the same size. */
1930 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1931 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1932 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1933 return 0;
1935 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1936 respectively refers to a hard register. */
1938 /* Find the inside of any subregs. */
1939 while (GET_CODE (out) == SUBREG)
1941 if (REG_P (SUBREG_REG (out))
1942 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1943 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1944 GET_MODE (SUBREG_REG (out)),
1945 SUBREG_BYTE (out),
1946 GET_MODE (out));
1947 out = SUBREG_REG (out);
1949 while (GET_CODE (in) == SUBREG)
1951 if (REG_P (SUBREG_REG (in))
1952 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1953 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1954 GET_MODE (SUBREG_REG (in)),
1955 SUBREG_BYTE (in),
1956 GET_MODE (in));
1957 in = SUBREG_REG (in);
1960 /* Narrow down the reg class, the same way push_reload will;
1961 otherwise we might find a dummy now, but push_reload won't. */
1963 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1964 if (preferred_class != NO_REGS)
1965 rclass = (enum reg_class) preferred_class;
1968 /* See if OUT will do. */
1969 if (REG_P (out)
1970 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1972 unsigned int regno = REGNO (out) + out_offset;
1973 unsigned int nwords = hard_regno_nregs[regno][outmode];
1974 rtx saved_rtx;
1976 /* When we consider whether the insn uses OUT,
1977 ignore references within IN. They don't prevent us
1978 from copying IN into OUT, because those refs would
1979 move into the insn that reloads IN.
1981 However, we only ignore IN in its role as this reload.
1982 If the insn uses IN elsewhere and it contains OUT,
1983 that counts. We can't be sure it's the "same" operand
1984 so it might not go through this reload. */
1985 saved_rtx = *inloc;
1986 *inloc = const0_rtx;
1988 if (regno < FIRST_PSEUDO_REGISTER
1989 && HARD_REGNO_MODE_OK (regno, outmode)
1990 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1991 PATTERN (this_insn), outloc))
1993 unsigned int i;
1995 for (i = 0; i < nwords; i++)
1996 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1997 regno + i))
1998 break;
2000 if (i == nwords)
2002 if (REG_P (real_out))
2003 value = real_out;
2004 else
2005 value = gen_rtx_REG (outmode, regno);
2009 *inloc = saved_rtx;
2012 /* Consider using IN if OUT was not acceptable
2013 or if OUT dies in this insn (like the quotient in a divmod insn).
2014 We can't use IN unless it dies in this insn,
2015 which means we must know accurately which hard regs are live.
2016 Also, the result can't go in IN if IN is used within OUT,
2017 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2018 if (hard_regs_live_known
2019 && REG_P (in)
2020 && REGNO (in) < FIRST_PSEUDO_REGISTER
2021 && (value == 0
2022 || find_reg_note (this_insn, REG_UNUSED, real_out))
2023 && find_reg_note (this_insn, REG_DEAD, real_in)
2024 && !fixed_regs[REGNO (in)]
2025 && HARD_REGNO_MODE_OK (REGNO (in),
2026 /* The only case where out and real_out might
2027 have different modes is where real_out
2028 is a subreg, and in that case, out
2029 has a real mode. */
2030 (GET_MODE (out) != VOIDmode
2031 ? GET_MODE (out) : outmode))
2032 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2033 /* However only do this if we can be sure that this input
2034 operand doesn't correspond with an uninitialized pseudo.
2035 global can assign some hardreg to it that is the same as
2036 the one assigned to a different, also live pseudo (as it
2037 can ignore the conflict). We must never introduce writes
2038 to such hardregs, as they would clobber the other live
2039 pseudo. See PR 20973. */
2040 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2041 ORIGINAL_REGNO (in))
2042 /* Similarly, only do this if we can be sure that the death
2043 note is still valid. global can assign some hardreg to
2044 the pseudo referenced in the note and simultaneously a
2045 subword of this hardreg to a different, also live pseudo,
2046 because only another subword of the hardreg is actually
2047 used in the insn. This cannot happen if the pseudo has
2048 been assigned exactly one hardreg. See PR 33732. */
2049 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2051 unsigned int regno = REGNO (in) + in_offset;
2052 unsigned int nwords = hard_regno_nregs[regno][inmode];
2054 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2055 && ! hard_reg_set_here_p (regno, regno + nwords,
2056 PATTERN (this_insn))
2057 && (! earlyclobber
2058 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2059 PATTERN (this_insn), inloc)))
2061 unsigned int i;
2063 for (i = 0; i < nwords; i++)
2064 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2065 regno + i))
2066 break;
2068 if (i == nwords)
2070 /* If we were going to use OUT as the reload reg
2071 and changed our mind, it means OUT is a dummy that
2072 dies here. So don't bother copying value to it. */
2073 if (for_real >= 0 && value == real_out)
2074 rld[for_real].out = 0;
2075 if (REG_P (real_in))
2076 value = real_in;
2077 else
2078 value = gen_rtx_REG (inmode, regno);
2083 return value;
2086 /* This page contains subroutines used mainly for determining
2087 whether the IN or an OUT of a reload can serve as the
2088 reload register. */
2090 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2092 int
2093 earlyclobber_operand_p (rtx x)
2095 int i;
2097 for (i = 0; i < n_earlyclobbers; i++)
2098 if (reload_earlyclobbers[i] == x)
2099 return 1;
2101 return 0;
2104 /* Return 1 if expression X alters a hard reg in the range
2105 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2106 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2107 X should be the body of an instruction. */
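   /* For instance (hypothetical rtl): with BEG_REGNO = 0 and END_REGNO = 2,
      a body of (set (reg:DI 1) ...) overlaps the range and yields 1, while
      (set (reg:SI 2) ...) lies entirely outside it and yields 0.  SUBREGs of
      the destination are looked through, and PARALLEL bodies are checked
      element by element.  */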
2109 static int
2110 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2112 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2114 rtx op0 = SET_DEST (x);
2116 while (GET_CODE (op0) == SUBREG)
2117 op0 = SUBREG_REG (op0);
2118 if (REG_P (op0))
2120 unsigned int r = REGNO (op0);
2122 /* See if this reg overlaps range under consideration. */
2123 if (r < end_regno
2124 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2125 return 1;
2128 else if (GET_CODE (x) == PARALLEL)
2130 int i = XVECLEN (x, 0) - 1;
2132 for (; i >= 0; i--)
2133 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2134 return 1;
2137 return 0;
2140 /* Return 1 if ADDR is a valid memory address for mode MODE
2141 in address space AS, and check that each pseudo reg has the
2142 proper kind of hard reg. */
2144 int
2145 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2146 rtx addr, addr_space_t as)
2148 #ifdef GO_IF_LEGITIMATE_ADDRESS
2149 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2150 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2151 return 0;
2153 win:
2154 return 1;
2155 #else
2156 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2157 #endif
2160 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2161 if they are the same hard reg, and has special hacks for
2162 autoincrement and autodecrement.
2163 This is specifically intended for find_reloads to use
2164 in determining whether two operands match.
2165 X is the operand whose number is the lower of the two.
2167 The value is 2 if Y contains a pre-increment that matches
2168 a non-incrementing address in X. */
2170 /* ??? To be completely correct, we should arrange to pass
2171 for X the output operand and for Y the input operand.
2172 For now, we assume that the output operand has the lower number
2173 because that is natural in (SET output (... input ...)). */
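   /* Illustrative calls on hypothetical operands:
        operands_match_p ((reg:SI 3), (reg:SI 3)) returns 1, and
        operands_match_p ((mem:SI (reg:SI 3)), (mem:SI (pre_inc (reg:SI 3))))
      returns 2, because the PRE_INC inside Y matches the plain address in X
      and callers must treat that case specially.  */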
2175 int
2176 operands_match_p (rtx x, rtx y)
2178 int i;
2179 RTX_CODE code = GET_CODE (x);
2180 const char *fmt;
2181 int success_2;
2183 if (x == y)
2184 return 1;
2185 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2186 && (REG_P (y) || (GET_CODE (y) == SUBREG
2187 && REG_P (SUBREG_REG (y)))))
2189 int j;
2191 if (code == SUBREG)
2193 i = REGNO (SUBREG_REG (x));
2194 if (i >= FIRST_PSEUDO_REGISTER)
2195 goto slow;
2196 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2197 GET_MODE (SUBREG_REG (x)),
2198 SUBREG_BYTE (x),
2199 GET_MODE (x));
2201 else
2202 i = REGNO (x);
2204 if (GET_CODE (y) == SUBREG)
2206 j = REGNO (SUBREG_REG (y));
2207 if (j >= FIRST_PSEUDO_REGISTER)
2208 goto slow;
2209 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2210 GET_MODE (SUBREG_REG (y)),
2211 SUBREG_BYTE (y),
2212 GET_MODE (y));
2214 else
2215 j = REGNO (y);
2217 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2218 multiple hard register group of scalar integer registers, so that
2219 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2220 register. */
2221 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2222 && SCALAR_INT_MODE_P (GET_MODE (x))
2223 && i < FIRST_PSEUDO_REGISTER)
2224 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2225 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2226 && SCALAR_INT_MODE_P (GET_MODE (y))
2227 && j < FIRST_PSEUDO_REGISTER)
2228 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2230 return i == j;
2232 /* If two operands must match, because they are really a single
2233 operand of an assembler insn, then two postincrements are invalid
2234 because the assembler insn would increment only once.
2235 On the other hand, a postincrement matches ordinary indexing
2236 if the postincrement is the output operand. */
2237 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2238 return operands_match_p (XEXP (x, 0), y);
2239 /* Two preincrements are invalid
2240 because the assembler insn would increment only once.
2241 On the other hand, a preincrement matches ordinary indexing
2242 if the preincrement is the input operand.
2243 In this case, return 2, since some callers need to do special
2244 things when this happens. */
2245 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2246 || GET_CODE (y) == PRE_MODIFY)
2247 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2249 slow:
2251 /* Now we have disposed of all the cases in which different rtx codes
2252 can match. */
2253 if (code != GET_CODE (y))
2254 return 0;
2256 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2257 if (GET_MODE (x) != GET_MODE (y))
2258 return 0;
2260 /* MEMs referring to different address spaces are not equivalent. */
2261 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2262 return 0;
2264 switch (code)
2266 case CONST_INT:
2267 case CONST_DOUBLE:
2268 case CONST_FIXED:
2269 return 0;
2271 case LABEL_REF:
2272 return XEXP (x, 0) == XEXP (y, 0);
2273 case SYMBOL_REF:
2274 return XSTR (x, 0) == XSTR (y, 0);
2276 default:
2277 break;
2280 /* Compare the elements. If any pair of corresponding elements
2281 fail to match, return 0 for the whole thing.
2283 success_2 = 0;
2284 fmt = GET_RTX_FORMAT (code);
2285 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2287 int val, j;
2288 switch (fmt[i])
2290 case 'w':
2291 if (XWINT (x, i) != XWINT (y, i))
2292 return 0;
2293 break;
2295 case 'i':
2296 if (XINT (x, i) != XINT (y, i))
2297 return 0;
2298 break;
2300 case 'e':
2301 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2302 if (val == 0)
2303 return 0;
2304 /* If any subexpression returns 2,
2305 we should return 2 if we are successful. */
2306 if (val == 2)
2307 success_2 = 1;
2308 break;
2310 case '0':
2311 break;
2313 case 'E':
2314 if (XVECLEN (x, i) != XVECLEN (y, i))
2315 return 0;
2316 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2318 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2319 if (val == 0)
2320 return 0;
2321 if (val == 2)
2322 success_2 = 1;
2324 break;
2326 /* It is believed that rtx's at this level will never
2327 contain anything but integers and other rtx's,
2328 except for within LABEL_REFs and SYMBOL_REFs. */
2329 default:
2330 gcc_unreachable ();
2333 return 1 + success_2;
2336 /* Describe the range of registers or memory referenced by X.
2337 If X is a register, set REG_FLAG and put the first register
2338 number into START and the last plus one into END.
2339 If X is a memory reference, put a base address into BASE
2340 and a range of integer offsets into START and END.
2341 If X is pushing on the stack, we can assume it causes no trouble,
2342 so we set the SAFE field. */
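   /* Worked example on hypothetical rtl, assuming a 4-byte SImode:
      decomposing (mem:SI (plus:SI (reg:SI sp) (const_int 8))) leaves
      reg_flag and safe clear and produces roughly
        base = (reg:SI sp), start = 8, end = 8 + GET_MODE_SIZE (SImode) = 12.  */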
2344 static struct decomposition
2345 decompose (rtx x)
2347 struct decomposition val;
2348 int all_const = 0;
2350 memset (&val, 0, sizeof (val));
2352 switch (GET_CODE (x))
2354 case MEM:
2356 rtx base = NULL_RTX, offset = 0;
2357 rtx addr = XEXP (x, 0);
2359 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2360 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2362 val.base = XEXP (addr, 0);
2363 val.start = -GET_MODE_SIZE (GET_MODE (x));
2364 val.end = GET_MODE_SIZE (GET_MODE (x));
2365 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2366 return val;
2369 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2371 if (GET_CODE (XEXP (addr, 1)) == PLUS
2372 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2373 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2375 val.base = XEXP (addr, 0);
2376 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2377 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2378 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2379 return val;
2383 if (GET_CODE (addr) == CONST)
2385 addr = XEXP (addr, 0);
2386 all_const = 1;
2388 if (GET_CODE (addr) == PLUS)
2390 if (CONSTANT_P (XEXP (addr, 0)))
2392 base = XEXP (addr, 1);
2393 offset = XEXP (addr, 0);
2395 else if (CONSTANT_P (XEXP (addr, 1)))
2397 base = XEXP (addr, 0);
2398 offset = XEXP (addr, 1);
2402 if (offset == 0)
2404 base = addr;
2405 offset = const0_rtx;
2407 if (GET_CODE (offset) == CONST)
2408 offset = XEXP (offset, 0);
2409 if (GET_CODE (offset) == PLUS)
2411 if (CONST_INT_P (XEXP (offset, 0)))
2413 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2414 offset = XEXP (offset, 0);
2416 else if (CONST_INT_P (XEXP (offset, 1)))
2418 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2419 offset = XEXP (offset, 1);
2421 else
2423 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2424 offset = const0_rtx;
2427 else if (!CONST_INT_P (offset))
2429 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2430 offset = const0_rtx;
2433 if (all_const && GET_CODE (base) == PLUS)
2434 base = gen_rtx_CONST (GET_MODE (base), base);
2436 gcc_assert (CONST_INT_P (offset));
2438 val.start = INTVAL (offset);
2439 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2440 val.base = base;
2442 break;
2444 case REG:
2445 val.reg_flag = 1;
2446 val.start = true_regnum (x);
2447 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2449 /* A pseudo with no hard reg. */
2450 val.start = REGNO (x);
2451 val.end = val.start + 1;
2453 else
2454 /* A hard reg. */
2455 val.end = end_hard_regno (GET_MODE (x), val.start);
2456 break;
2458 case SUBREG:
2459 if (!REG_P (SUBREG_REG (x)))
2460 /* This could be more precise, but it's good enough. */
2461 return decompose (SUBREG_REG (x));
2462 val.reg_flag = 1;
2463 val.start = true_regnum (x);
2464 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2465 return decompose (SUBREG_REG (x));
2466 else
2467 /* A hard reg. */
2468 val.end = val.start + subreg_nregs (x);
2469 break;
2471 case SCRATCH:
2472 /* This hasn't been assigned yet, so it can't conflict yet. */
2473 val.safe = 1;
2474 break;
2476 default:
2477 gcc_assert (CONSTANT_P (x));
2478 val.safe = 1;
2479 break;
2481 return val;
2484 /* Return 1 if altering Y will not modify the value of X.
2485 Y is also described by YDATA, which should be decompose (Y). */
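   /* For illustration, with hypothetical operands and a 4-byte SImode:
      if Y is (mem:SI (plus (reg fp) (const_int 4))) and X is
      (mem:SI (plus (reg fp) (const_int 16))), both decompose to the same
      base and the byte ranges [4,8) and [16,20) do not overlap, so
      immune_p returns 1; overlapping ranges with the same base would
      return 0.  */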
2487 static int
2488 immune_p (rtx x, rtx y, struct decomposition ydata)
2490 struct decomposition xdata;
2492 if (ydata.reg_flag)
2493 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2494 if (ydata.safe)
2495 return 1;
2497 gcc_assert (MEM_P (y));
2498 /* If Y is memory and X is not, Y can't affect X. */
2499 if (!MEM_P (x))
2500 return 1;
2502 xdata = decompose (x);
2504 if (! rtx_equal_p (xdata.base, ydata.base))
2506 /* If bases are distinct symbolic constants, there is no overlap. */
2507 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2508 return 1;
2509 /* Constants and stack slots never overlap. */
2510 if (CONSTANT_P (xdata.base)
2511 && (ydata.base == frame_pointer_rtx
2512 || ydata.base == hard_frame_pointer_rtx
2513 || ydata.base == stack_pointer_rtx))
2514 return 1;
2515 if (CONSTANT_P (ydata.base)
2516 && (xdata.base == frame_pointer_rtx
2517 || xdata.base == hard_frame_pointer_rtx
2518 || xdata.base == stack_pointer_rtx))
2519 return 1;
2520 /* If either base is variable, we don't know anything. */
2521 return 0;
2524 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2527 /* Similar to immune_p, but decomposes CLOBBER itself; return 1 if altering CLOBBER cannot modify the value of OP. */
2529 int
2530 safe_from_earlyclobber (rtx op, rtx clobber)
2532 struct decomposition early_data;
2534 early_data = decompose (clobber);
2535 return immune_p (op, clobber, early_data);
2538 /* Main entry point of this file: search the body of INSN
2539 for values that need reloading and record them with push_reload.
2540 REPLACE nonzero means record also where the values occur
2541 so that subst_reloads can be used.
2543 IND_LEVELS says how many levels of indirection are supported by this
2544 machine; a value of zero means that a memory reference is not a valid
2545 memory address.
2547 LIVE_KNOWN says we have valid information about which hard
2548 regs are live at each point in the program; this is true when
2549 we are called from global_alloc but false when stupid register
2550 allocation has been done.
2552 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2553 which is nonnegative if the reg has been commandeered for reloading into.
2554 It is copied into STATIC_RELOAD_REG_P and referenced from there
2555 by various subroutines.
2557 Return TRUE if some operands need to be changed, because of swapping
2558 commutative operands, reg_equiv_address substitution, or whatever. */
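   /* Rough usage sketch with hypothetical argument names, not a verbatim
      quote from reload1.c:
        operands_changed = find_reloads (insn, 1, ind_levels,
                                         live_known, reload_reg_p);
      On return, rld[0 .. n_reloads-1] describe the reloads this insn needs;
      because the second argument was nonzero, replacements[] also records
      where subst_reloads must later substitute the chosen reload regs.  */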
2560 int
2561 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2562 short *reload_reg_p)
2564 int insn_code_number;
2565 int i, j;
2566 int noperands;
2567 /* These start out as the constraints for the insn
2568 and they are chewed up as we consider alternatives. */
2569 const char *constraints[MAX_RECOG_OPERANDS];
2570 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2571 a register. */
2572 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2573 char pref_or_nothing[MAX_RECOG_OPERANDS];
2574 /* Nonzero for a MEM operand whose entire address needs a reload.
2575 May be -1 to indicate the entire address may or may not need a reload. */
2576 int address_reloaded[MAX_RECOG_OPERANDS];
2577 /* Nonzero for an address operand that needs to be completely reloaded.
2578 May be -1 to indicate the entire operand may or may not need a reload. */
2579 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2580 /* Value of enum reload_type to use for operand. */
2581 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2582 /* Value of enum reload_type to use within address of operand. */
2583 enum reload_type address_type[MAX_RECOG_OPERANDS];
2584 /* Save the usage of each operand. */
2585 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2586 int no_input_reloads = 0, no_output_reloads = 0;
2587 int n_alternatives;
2588 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2589 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2590 char this_alternative_win[MAX_RECOG_OPERANDS];
2591 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2592 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2593 int this_alternative_matches[MAX_RECOG_OPERANDS];
2594 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2595 int this_alternative_number;
2596 int goal_alternative_number = 0;
2597 int operand_reloadnum[MAX_RECOG_OPERANDS];
2598 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2599 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2600 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2601 char goal_alternative_win[MAX_RECOG_OPERANDS];
2602 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2603 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2604 int goal_alternative_swapped;
2605 int best;
2606 int commutative;
2607 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2608 rtx substed_operand[MAX_RECOG_OPERANDS];
2609 rtx body = PATTERN (insn);
2610 rtx set = single_set (insn);
2611 int goal_earlyclobber = 0, this_earlyclobber;
2612 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2613 int retval = 0;
2615 this_insn = insn;
2616 n_reloads = 0;
2617 n_replacements = 0;
2618 n_earlyclobbers = 0;
2619 replace_reloads = replace;
2620 hard_regs_live_known = live_known;
2621 static_reload_reg_p = reload_reg_p;
2623 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2624 neither are insns that SET cc0. Insns that use CC0 are not allowed
2625 to have any input reloads. */
2626 if (JUMP_P (insn) || CALL_P (insn))
2627 no_output_reloads = 1;
2629 #ifdef HAVE_cc0
2630 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2631 no_input_reloads = 1;
2632 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2633 no_output_reloads = 1;
2634 #endif
2636 #ifdef SECONDARY_MEMORY_NEEDED
2637 /* The eliminated forms of any secondary memory locations are per-insn, so
2638 clear them out here. */
2640 if (secondary_memlocs_elim_used)
2642 memset (secondary_memlocs_elim, 0,
2643 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2644 secondary_memlocs_elim_used = 0;
2646 #endif
2648 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2649 is cheap to move between them. If it is not, there may not be an insn
2650 to do the copy, so we may need a reload. */
2651 if (GET_CODE (body) == SET
2652 && REG_P (SET_DEST (body))
2653 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2654 && REG_P (SET_SRC (body))
2655 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2656 && register_move_cost (GET_MODE (SET_SRC (body)),
2657 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2658 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2659 return 0;
2661 extract_insn (insn);
2663 noperands = reload_n_operands = recog_data.n_operands;
2664 n_alternatives = recog_data.n_alternatives;
2666 /* Just return "no reloads" if insn has no operands with constraints. */
2667 if (noperands == 0 || n_alternatives == 0)
2668 return 0;
2670 insn_code_number = INSN_CODE (insn);
2671 this_insn_is_asm = insn_code_number < 0;
2673 memcpy (operand_mode, recog_data.operand_mode,
2674 noperands * sizeof (enum machine_mode));
2675 memcpy (constraints, recog_data.constraints,
2676 noperands * sizeof (const char *));
2678 commutative = -1;
2680 /* If we will need to know, later, whether some pair of operands
2681 are the same, we must compare them now and save the result.
2682 Reloading the base and index registers will clobber them
2683 and afterward they will fail to match. */
2685 for (i = 0; i < noperands; i++)
2687 const char *p;
2688 int c;
2689 char *end;
2691 substed_operand[i] = recog_data.operand[i];
2692 p = constraints[i];
2694 modified[i] = RELOAD_READ;
2696 /* Scan this operand's constraint to see if it is an output operand,
2697 an in-out operand, is commutative, or should match another. */
2699 while ((c = *p))
2701 p += CONSTRAINT_LEN (c, p);
2702 switch (c)
2704 case '=':
2705 modified[i] = RELOAD_WRITE;
2706 break;
2707 case '+':
2708 modified[i] = RELOAD_READ_WRITE;
2709 break;
2710 case '%':
2712 /* The last operand should not be marked commutative. */
2713 gcc_assert (i != noperands - 1);
2715 /* We currently only support one commutative pair of
2716 operands. Some existing asm code currently uses more
2717 than one pair. Previously, that would usually work,
2718 but sometimes it would crash the compiler. We
2719 continue supporting that case as well as we can by
2720 silently ignoring all but the first pair. In the
2721 future we may handle it correctly. */
2722 if (commutative < 0)
2723 commutative = i;
2724 else
2725 gcc_assert (this_insn_is_asm);
2727 break;
2728 /* Use of ISDIGIT is tempting here, but it may get expensive because
2729 of locale support we don't want. */
2730 case '0': case '1': case '2': case '3': case '4':
2731 case '5': case '6': case '7': case '8': case '9':
2733 c = strtoul (p - 1, &end, 10);
2734 p = end;
2736 operands_match[c][i]
2737 = operands_match_p (recog_data.operand[c],
2738 recog_data.operand[i]);
2740 /* An operand may not match itself. */
2741 gcc_assert (c != i);
2743 /* If C can be commuted with C+1, and C might need to match I,
2744 then C+1 might also need to match I. */
2745 if (commutative >= 0)
2747 if (c == commutative || c == commutative + 1)
2749 int other = c + (c == commutative ? 1 : -1);
2750 operands_match[other][i]
2751 = operands_match_p (recog_data.operand[other],
2752 recog_data.operand[i]);
2754 if (i == commutative || i == commutative + 1)
2756 int other = i + (i == commutative ? 1 : -1);
2757 operands_match[c][other]
2758 = operands_match_p (recog_data.operand[c],
2759 recog_data.operand[other]);
2761 /* Note that C is supposed to be less than I.
2762 No need to consider altering both C and I because in
2763 that case we would alter one into the other. */
2770 /* Examine each operand that is a memory reference or memory address
2771 and reload parts of the addresses into index registers.
2772 Also here any references to pseudo regs that didn't get hard regs
2773 but are equivalent to constants get replaced in the insn itself
2774 with those constants. Nobody will ever see them again.
2776 Finally, set up the preferred classes of each operand. */
2778 for (i = 0; i < noperands; i++)
2780 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2782 address_reloaded[i] = 0;
2783 address_operand_reloaded[i] = 0;
2784 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2785 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2786 : RELOAD_OTHER);
2787 address_type[i]
2788 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2789 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2790 : RELOAD_OTHER);
2792 if (*constraints[i] == 0)
2793 /* Ignore things like match_operator operands. */
2795 else if (constraints[i][0] == 'p'
2796 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2798 address_operand_reloaded[i]
2799 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2800 recog_data.operand[i],
2801 recog_data.operand_loc[i],
2802 i, operand_type[i], ind_levels, insn);
2804 /* If we now have a simple operand where we used to have a
2805 PLUS or MULT, re-recognize and try again. */
2806 if ((OBJECT_P (*recog_data.operand_loc[i])
2807 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2808 && (GET_CODE (recog_data.operand[i]) == MULT
2809 || GET_CODE (recog_data.operand[i]) == PLUS))
2811 INSN_CODE (insn) = -1;
2812 retval = find_reloads (insn, replace, ind_levels, live_known,
2813 reload_reg_p);
2814 return retval;
2817 recog_data.operand[i] = *recog_data.operand_loc[i];
2818 substed_operand[i] = recog_data.operand[i];
2820 /* Address operands are reloaded in their existing mode,
2821 no matter what is specified in the machine description. */
2822 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2824 /* If the address is a single CONST_INT, pick the address mode
2825 instead; otherwise we will not know later in which mode
2826 the reload should be performed. */
2827 if (operand_mode[i] == VOIDmode)
2828 operand_mode[i] = Pmode;
2831 else if (code == MEM)
2833 address_reloaded[i]
2834 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2835 recog_data.operand_loc[i],
2836 XEXP (recog_data.operand[i], 0),
2837 &XEXP (recog_data.operand[i], 0),
2838 i, address_type[i], ind_levels, insn);
2839 recog_data.operand[i] = *recog_data.operand_loc[i];
2840 substed_operand[i] = recog_data.operand[i];
2842 else if (code == SUBREG)
2844 rtx reg = SUBREG_REG (recog_data.operand[i]);
2845 rtx op
2846 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2847 ind_levels,
2848 set != 0
2849 && &SET_DEST (set) == recog_data.operand_loc[i],
2850 insn,
2851 &address_reloaded[i]);
2853 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2854 that didn't get a hard register, emit a USE with a REG_EQUAL
2855 note in front so that we might inherit a previous, possibly
2856 wider reload. */
2858 if (replace
2859 && MEM_P (op)
2860 && REG_P (reg)
2861 && (GET_MODE_SIZE (GET_MODE (reg))
2862 >= GET_MODE_SIZE (GET_MODE (op)))
2863 && reg_equiv_constant (REGNO (reg)) == 0)
2864 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2865 insn),
2866 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2868 substed_operand[i] = recog_data.operand[i] = op;
2870 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2871 /* We can get a PLUS as an "operand" as a result of register
2872 elimination. See eliminate_regs and gen_reload. We handle
2873 a unary operator by reloading the operand. */
2874 substed_operand[i] = recog_data.operand[i]
2875 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2876 ind_levels, 0, insn,
2877 &address_reloaded[i]);
2878 else if (code == REG)
2880 /* This is equivalent to calling find_reloads_toplev.
2881 The code is duplicated for speed.
2882 When we find a pseudo always equivalent to a constant,
2883 we replace it by the constant. We must be sure, however,
2884 that we don't try to replace it in the insn in which it
2885 is being set. */
2886 int regno = REGNO (recog_data.operand[i]);
2887 if (reg_equiv_constant (regno) != 0
2888 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2890 /* Record the existing mode so that the check whether constants
2891 are allowed will work when operand_mode isn't specified. */
2893 if (operand_mode[i] == VOIDmode)
2894 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2896 substed_operand[i] = recog_data.operand[i]
2897 = reg_equiv_constant (regno);
2899 if (reg_equiv_memory_loc (regno) != 0
2900 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2901 /* We need not give a valid is_set_dest argument since the case
2902 of a constant equivalence was checked above. */
2903 substed_operand[i] = recog_data.operand[i]
2904 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2905 ind_levels, 0, insn,
2906 &address_reloaded[i]);
2908 /* If the operand is still a register (we didn't replace it with an
2909 equivalent), get the preferred class to reload it into. */
2910 code = GET_CODE (recog_data.operand[i]);
2911 preferred_class[i]
2912 = ((code == REG && REGNO (recog_data.operand[i])
2913 >= FIRST_PSEUDO_REGISTER)
2914 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2915 : NO_REGS);
2916 pref_or_nothing[i]
2917 = (code == REG
2918 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2919 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2922 /* If this is simply a copy from operand 1 to operand 0, merge the
2923 preferred classes for the operands. */
2924 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2925 && recog_data.operand[1] == SET_SRC (set))
2927 preferred_class[0] = preferred_class[1]
2928 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2929 pref_or_nothing[0] |= pref_or_nothing[1];
2930 pref_or_nothing[1] |= pref_or_nothing[0];
2933 /* Now see what we need for pseudo-regs that didn't get hard regs
2934 or got the wrong kind of hard reg. For this, we must consider
2935 all the operands together against the register constraints. */
2937 best = MAX_RECOG_OPERANDS * 2 + 600;
2939 goal_alternative_swapped = 0;
2941 /* The constraints are made of several alternatives.
2942 Each operand's constraint looks like foo,bar,... with commas
2943 separating the alternatives. The first alternatives for all
2944 operands go together, the second alternatives go together, etc.
2946 First loop over alternatives. */
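   /* Illustrative example with hypothetical constraint strings: if
      operand 0 is constrained by "=r,m" and operand 1 by "ri,r", then
      alternative 0 pairs "=r" with "ri" and alternative 1 pairs "m" with
      "r".  The loop below costs each alternative in turn and records the
      cheapest one in the goal_alternative arrays.  */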
2948 for (this_alternative_number = 0;
2949 this_alternative_number < n_alternatives;
2950 this_alternative_number++)
2952 int swapped;
2954 if (!recog_data.alternative_enabled_p[this_alternative_number])
2956 int i;
2958 for (i = 0; i < recog_data.n_operands; i++)
2959 constraints[i] = skip_alternative (constraints[i]);
2961 continue;
2964 /* If insn is commutative (it's safe to exchange a certain pair
2965 of operands) then we need to try each alternative twice, the
2966 second time matching those two operands as if we had
2967 exchanged them. To do this, really exchange them in
2968 operands. */
2969 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
2971 /* Loop over operands for one constraint alternative. */
2972 /* LOSERS counts those that don't fit this alternative
2973 and would require loading. */
2974 int losers = 0;
2975 /* BAD is set to 1 if some operand can't fit this alternative
2976 even after reloading. */
2977 int bad = 0;
2978 /* REJECT is a count of how undesirable this alternative says it is
2979 if any reloading is required. If the alternative matches exactly
2980 then REJECT is ignored, but otherwise it gets this much
2981 counted against it in addition to the reloading needed. Each
2982 ? counts three times here since we want the disparaging caused by
2983 a bad register class to only count 1/3 as much. */
2984 int reject = 0;
2986 if (swapped)
2988 enum reg_class tclass;
2989 int t;
2991 recog_data.operand[commutative] = substed_operand[commutative + 1];
2992 recog_data.operand[commutative + 1] = substed_operand[commutative];
2993 /* Swap the duplicates too. */
2994 for (i = 0; i < recog_data.n_dups; i++)
2995 if (recog_data.dup_num[i] == commutative
2996 || recog_data.dup_num[i] == commutative + 1)
2997 *recog_data.dup_loc[i]
2998 = recog_data.operand[(int) recog_data.dup_num[i]];
3000 tclass = preferred_class[commutative];
3001 preferred_class[commutative] = preferred_class[commutative + 1];
3002 preferred_class[commutative + 1] = tclass;
3004 t = pref_or_nothing[commutative];
3005 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3006 pref_or_nothing[commutative + 1] = t;
3008 t = address_reloaded[commutative];
3009 address_reloaded[commutative] = address_reloaded[commutative + 1];
3010 address_reloaded[commutative + 1] = t;
3013 this_earlyclobber = 0;
3015 for (i = 0; i < noperands; i++)
3017 const char *p = constraints[i];
3018 char *end;
3019 int len;
3020 int win = 0;
3021 int did_match = 0;
3022 /* 0 => this operand can be reloaded somehow for this alternative. */
3023 int badop = 1;
3024 /* Nonzero => this operand can be reloaded if the alternative allows regs. */
3025 int winreg = 0;
3026 int c;
3027 int m;
3028 rtx operand = recog_data.operand[i];
3029 int offset = 0;
3030 /* Nonzero means this is a MEM that must be reloaded into a reg
3031 regardless of what the constraint says. */
3032 int force_reload = 0;
3033 int offmemok = 0;
3034 /* Nonzero if a constant forced into memory would be OK for this
3035 operand. */
3036 int constmemok = 0;
3037 int earlyclobber = 0;
3039 /* If the predicate accepts a unary operator, it means that
3040 we need to reload the operand, but do not do this for
3041 match_operator and friends. */
3042 if (UNARY_P (operand) && *p != 0)
3043 operand = XEXP (operand, 0);
3045 /* If the operand is a SUBREG, extract
3046 the REG or MEM (or maybe even a constant) within.
3047 (Constants can occur as a result of reg_equiv_constant.) */
3049 while (GET_CODE (operand) == SUBREG)
3051 /* Offset only matters when operand is a REG and
3052 it is a hard reg.  This is because it is passed
3053 to reg_fits_class_p only if it is a REG, and
3054 reg_fits_class_p returns 0 for all pseudos. */
3055 if (REG_P (SUBREG_REG (operand))
3056 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3058 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3059 GET_MODE (SUBREG_REG (operand)),
3060 SUBREG_BYTE (operand),
3061 GET_MODE (operand)) < 0)
3062 force_reload = 1;
3063 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3064 GET_MODE (SUBREG_REG (operand)),
3065 SUBREG_BYTE (operand),
3066 GET_MODE (operand));
3068 operand = SUBREG_REG (operand);
3069 /* Force reload if this is a constant or PLUS or if there may
3070 be a problem accessing OPERAND in the outer mode. */
3071 if (CONSTANT_P (operand)
3072 || GET_CODE (operand) == PLUS
3073 /* We must force a reload of paradoxical SUBREGs
3074 of a MEM because the alignment of the inner value
3075 may not be enough to do the outer reference. On
3076 big-endian machines, it may also reference outside
3077 the object.
3079 On machines that extend byte operations and we have a
3080 SUBREG where both the inner and outer modes are no wider
3081 than a word and the inner mode is narrower, is integral,
3082 and gets extended when loaded from memory, combine.c has
3083 made assumptions about the behavior of the machine in such
3084 register access. If the data is, in fact, in memory we
3085 must always load using the size assumed to be in the
3086 register and let the insn do the different-sized
3087 accesses.
3089 This is doubly true if WORD_REGISTER_OPERATIONS. In
3090 this case eliminate_regs has left non-paradoxical
3091 subregs for push_reload to see. Make sure it does
3092 by forcing the reload.
3094 ??? When is it right at this stage to have a subreg
3095 of a mem that is _not_ to be handled specially? IMO
3096 those should have been reduced to just a mem. */
3097 || ((MEM_P (operand)
3098 || (REG_P (operand)
3099 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3100 #ifndef WORD_REGISTER_OPERATIONS
3101 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3102 < BIGGEST_ALIGNMENT)
3103 && (GET_MODE_SIZE (operand_mode[i])
3104 > GET_MODE_SIZE (GET_MODE (operand))))
3105 || BYTES_BIG_ENDIAN
3106 #ifdef LOAD_EXTEND_OP
3107 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3108 && (GET_MODE_SIZE (GET_MODE (operand))
3109 <= UNITS_PER_WORD)
3110 && (GET_MODE_SIZE (operand_mode[i])
3111 > GET_MODE_SIZE (GET_MODE (operand)))
3112 && INTEGRAL_MODE_P (GET_MODE (operand))
3113 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3114 #endif
3116 #endif
3119 force_reload = 1;
3122 this_alternative[i] = NO_REGS;
3123 this_alternative_win[i] = 0;
3124 this_alternative_match_win[i] = 0;
3125 this_alternative_offmemok[i] = 0;
3126 this_alternative_earlyclobber[i] = 0;
3127 this_alternative_matches[i] = -1;
3129 /* An empty constraint or empty alternative
3130 allows anything which matched the pattern. */
3131 if (*p == 0 || *p == ',')
3132 win = 1, badop = 0;
3134 /* Scan this alternative's specs for this operand;
3135 set WIN if the operand fits any letter in this alternative.
3136 Otherwise, clear BADOP if this operand could
3137 fit some letter after reloads,
3138 or set WINREG if this operand could fit after reloads
3139 provided the constraint allows some registers. */
3142 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3144 case '\0':
3145 len = 0;
3146 break;
3147 case ',':
3148 c = '\0';
3149 break;
3151 case '=': case '+': case '*':
3152 break;
3154 case '%':
3155 /* We only support one commutative marker, the first
3156 one. We already set commutative above. */
3157 break;
3159 case '?':
3160 reject += 6;
3161 break;
3163 case '!':
3164 reject = 600;
3165 break;
3167 case '#':
3168 /* Ignore rest of this alternative as far as
3169 reloading is concerned. */
3170 do
3171 p++;
3172 while (*p && *p != ',');
3173 len = 0;
3174 break;
3176 case '0': case '1': case '2': case '3': case '4':
3177 case '5': case '6': case '7': case '8': case '9':
3178 m = strtoul (p, &end, 10);
3179 p = end;
3180 len = 0;
3182 this_alternative_matches[i] = m;
3183 /* We are supposed to match a previous operand.
3184 If we do, we win if that one did.
3185 If we do not, count both of the operands as losers.
3186 (This is too conservative, since most of the time
3187 only a single reload insn will be needed to make
3188 the two operands win. As a result, this alternative
3189 may be rejected when it is actually desirable.) */
3190 if ((swapped && (m != commutative || i != commutative + 1))
3191 /* If we are matching as if two operands were swapped,
3192 also pretend that operands_match had been computed
3193 with the operands swapped.
3194 But if I is the second of those and M is the first,
3195 don't exchange them, because operands_match is valid
3196 only on one side of its diagonal. */
3197 ? (operands_match
3198 [(m == commutative || m == commutative + 1)
3199 ? 2 * commutative + 1 - m : m]
3200 [(i == commutative || i == commutative + 1)
3201 ? 2 * commutative + 1 - i : i])
3202 : operands_match[m][i])
3204 /* If we are matching a non-offsettable address where an
3205 offsettable address was expected, then we must reject
3206 this combination, because we can't reload it. */
3207 if (this_alternative_offmemok[m]
3208 && MEM_P (recog_data.operand[m])
3209 && this_alternative[m] == NO_REGS
3210 && ! this_alternative_win[m])
3211 bad = 1;
3213 did_match = this_alternative_win[m];
3215 else
3217 /* Operands don't match. */
3218 rtx value;
3219 int loc1, loc2;
3220 /* Retroactively mark the operand we had to match
3221 as a loser, if it wasn't already. */
3222 if (this_alternative_win[m])
3223 losers++;
3224 this_alternative_win[m] = 0;
3225 if (this_alternative[m] == NO_REGS)
3226 bad = 1;
3227 /* But count the pair only once in the total badness of
3228 this alternative, if the pair can be a dummy reload.
3229 The pointers in operand_loc are not swapped; swap
3230 them by hand if necessary. */
3231 if (swapped && i == commutative)
3232 loc1 = commutative + 1;
3233 else if (swapped && i == commutative + 1)
3234 loc1 = commutative;
3235 else
3236 loc1 = i;
3237 if (swapped && m == commutative)
3238 loc2 = commutative + 1;
3239 else if (swapped && m == commutative + 1)
3240 loc2 = commutative;
3241 else
3242 loc2 = m;
3243 value
3244 = find_dummy_reload (recog_data.operand[i],
3245 recog_data.operand[m],
3246 recog_data.operand_loc[loc1],
3247 recog_data.operand_loc[loc2],
3248 operand_mode[i], operand_mode[m],
3249 this_alternative[m], -1,
3250 this_alternative_earlyclobber[m]);
3252 if (value != 0)
3253 losers--;
3255 /* This can be fixed with reloads if the operand
3256 we are supposed to match can be fixed with reloads. */
3257 badop = 0;
3258 this_alternative[i] = this_alternative[m];
3260 /* If we have to reload this operand and some previous
3261 operand also had to match the same thing as this
3262 operand, we don't know how to do that. So reject this
3263 alternative. */
3264 if (! did_match || force_reload)
3265 for (j = 0; j < i; j++)
3266 if (this_alternative_matches[j]
3267 == this_alternative_matches[i])
3268 badop = 1;
3269 break;
3271 case 'p':
3272 /* All necessary reloads for an address_operand
3273 were handled in find_reloads_address. */
3274 this_alternative[i]
3275 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3276 ADDRESS, SCRATCH);
3277 win = 1;
3278 badop = 0;
3279 break;
3281 case TARGET_MEM_CONSTRAINT:
3282 if (force_reload)
3283 break;
3284 if (MEM_P (operand)
3285 || (REG_P (operand)
3286 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3287 && reg_renumber[REGNO (operand)] < 0))
3288 win = 1;
3289 if (CONST_POOL_OK_P (operand_mode[i], operand))
3290 badop = 0;
3291 constmemok = 1;
3292 break;
3294 case '<':
3295 if (MEM_P (operand)
3296 && ! address_reloaded[i]
3297 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3298 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3299 win = 1;
3300 break;
3302 case '>':
3303 if (MEM_P (operand)
3304 && ! address_reloaded[i]
3305 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3306 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3307 win = 1;
3308 break;
3310 /* Memory operand whose address is not offsettable. */
3311 case 'V':
3312 if (force_reload)
3313 break;
3314 if (MEM_P (operand)
3315 && ! (ind_levels ? offsettable_memref_p (operand)
3316 : offsettable_nonstrict_memref_p (operand))
3317 /* Certain mem addresses will become offsettable
3318 after they themselves are reloaded. This is important;
3319 we don't want our own handling of unoffsettables
3320 to override the handling of reg_equiv_address. */
3321 && !(REG_P (XEXP (operand, 0))
3322 && (ind_levels == 0
3323 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3324 win = 1;
3325 break;
3327 /* Memory operand whose address is offsettable. */
3328 case 'o':
3329 if (force_reload)
3330 break;
3331 if ((MEM_P (operand)
3332 /* If IND_LEVELS, find_reloads_address won't reload a
3333 pseudo that didn't get a hard reg, so we have to
3334 reject that case. */
3335 && ((ind_levels ? offsettable_memref_p (operand)
3336 : offsettable_nonstrict_memref_p (operand))
3337 /* A reloaded address is offsettable because it is now
3338 just a simple register indirect. */
3339 || address_reloaded[i] == 1))
3340 || (REG_P (operand)
3341 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3342 && reg_renumber[REGNO (operand)] < 0
3343 /* If reg_equiv_address is nonzero, we will be
3344 loading it into a register; hence it will be
3345 offsettable, but we cannot say that reg_equiv_mem
3346 is offsettable without checking. */
3347 && ((reg_equiv_mem (REGNO (operand)) != 0
3348 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3349 || (reg_equiv_address (REGNO (operand)) != 0))))
3350 win = 1;
3351 if (CONST_POOL_OK_P (operand_mode[i], operand)
3352 || MEM_P (operand))
3353 badop = 0;
3354 constmemok = 1;
3355 offmemok = 1;
3356 break;
3358 case '&':
3359 /* Output operand that is stored before the need for the
3360 input operands (and their index registers) is over. */
3361 earlyclobber = 1, this_earlyclobber = 1;
3362 break;
3364 case 'E':
3365 case 'F':
3366 if (GET_CODE (operand) == CONST_DOUBLE
3367 || (GET_CODE (operand) == CONST_VECTOR
3368 && (GET_MODE_CLASS (GET_MODE (operand))
3369 == MODE_VECTOR_FLOAT)))
3370 win = 1;
3371 break;
3373 case 'G':
3374 case 'H':
3375 if (GET_CODE (operand) == CONST_DOUBLE
3376 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3377 win = 1;
3378 break;
3380 case 's':
3381 if (CONST_INT_P (operand)
3382 || (GET_CODE (operand) == CONST_DOUBLE
3383 && GET_MODE (operand) == VOIDmode))
3384 break;
3385 case 'i':
3386 if (CONSTANT_P (operand)
3387 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3388 win = 1;
3389 break;
3391 case 'n':
3392 if (CONST_INT_P (operand)
3393 || (GET_CODE (operand) == CONST_DOUBLE
3394 && GET_MODE (operand) == VOIDmode))
3395 win = 1;
3396 break;
3398 case 'I':
3399 case 'J':
3400 case 'K':
3401 case 'L':
3402 case 'M':
3403 case 'N':
3404 case 'O':
3405 case 'P':
3406 if (CONST_INT_P (operand)
3407 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3408 win = 1;
3409 break;
3411 case 'X':
3412 force_reload = 0;
3413 win = 1;
3414 break;
3416 case 'g':
3417 if (! force_reload
3418 /* A PLUS is never a valid operand, but reload can make
3419 it from a register when eliminating registers. */
3420 && GET_CODE (operand) != PLUS
3421 /* A SCRATCH is not a valid operand. */
3422 && GET_CODE (operand) != SCRATCH
3423 && (! CONSTANT_P (operand)
3424 || ! flag_pic
3425 || LEGITIMATE_PIC_OPERAND_P (operand))
3426 && (GENERAL_REGS == ALL_REGS
3427 || !REG_P (operand)
3428 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3429 && reg_renumber[REGNO (operand)] < 0)))
3430 win = 1;
3431 /* Drop through into 'r' case. */
3433 case 'r':
3434 this_alternative[i]
3435 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3436 goto reg;
3438 default:
3439 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3441 #ifdef EXTRA_CONSTRAINT_STR
3442 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3444 if (force_reload)
3445 break;
3446 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3447 win = 1;
3448 /* If the address was already reloaded,
3449 we win as well. */
3450 else if (MEM_P (operand)
3451 && address_reloaded[i] == 1)
3452 win = 1;
3453 /* Likewise if the address will be reloaded because
3454 reg_equiv_address is nonzero. For reg_equiv_mem
3455 we have to check. */
3456 else if (REG_P (operand)
3457 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3458 && reg_renumber[REGNO (operand)] < 0
3459 && ((reg_equiv_mem (REGNO (operand)) != 0
3460 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3461 || (reg_equiv_address (REGNO (operand)) != 0)))
3462 win = 1;
3464 /* If we didn't already win, we can reload
3465 constants via force_const_mem, and other
3466 MEMs by reloading the address like for 'o'. */
3467 if (CONST_POOL_OK_P (operand_mode[i], operand)
3468 || MEM_P (operand))
3469 badop = 0;
3470 constmemok = 1;
3471 offmemok = 1;
3472 break;
3474 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3476 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3477 win = 1;
3479 /* If we didn't already win, we can reload
3480 the address into a base register. */
3481 this_alternative[i]
3482 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3483 ADDRESS, SCRATCH);
3484 badop = 0;
3485 break;
3488 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3489 win = 1;
3490 #endif
3491 break;
3494 this_alternative[i]
3495 = (reg_class_subunion
3496 [this_alternative[i]]
3497 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3498 reg:
3499 if (GET_MODE (operand) == BLKmode)
3500 break;
3501 winreg = 1;
3502 if (REG_P (operand)
3503 && reg_fits_class_p (operand, this_alternative[i],
3504 offset, GET_MODE (recog_data.operand[i])))
3505 win = 1;
3506 break;
3508 while ((p += len), c);
3510 if (swapped == (commutative >= 0 ? 1 : 0))
3511 constraints[i] = p;
3513 /* If this operand could be handled with a reg,
3514 and some reg is allowed, then this operand can be handled. */
3515 if (winreg && this_alternative[i] != NO_REGS
3516 && (win || !class_only_fixed_regs[this_alternative[i]]))
3517 badop = 0;
3519 /* Record which operands fit this alternative. */
3520 this_alternative_earlyclobber[i] = earlyclobber;
3521 if (win && ! force_reload)
3522 this_alternative_win[i] = 1;
3523 else if (did_match && ! force_reload)
3524 this_alternative_match_win[i] = 1;
3525 else
3527 int const_to_mem = 0;
3529 this_alternative_offmemok[i] = offmemok;
3530 losers++;
3531 if (badop)
3532 bad = 1;
3533 /* Alternative loses if it has no regs for a reg operand. */
3534 if (REG_P (operand)
3535 && this_alternative[i] == NO_REGS
3536 && this_alternative_matches[i] < 0)
3537 bad = 1;
3539 /* If this is a constant that is reloaded into the desired
3540 class by copying it to memory first, count that as another
3541 reload. This is consistent with other code and is
3542 required to avoid choosing another alternative when
3543 the constant is moved into memory by this function on
3544 an early reload pass. Note that the test here is
3545 precisely the same as in the code below that calls
3546 force_const_mem. */
3547 if (CONST_POOL_OK_P (operand_mode[i], operand)
3548 && ((targetm.preferred_reload_class (operand,
3549 this_alternative[i])
3550 == NO_REGS)
3551 || no_input_reloads))
3553 const_to_mem = 1;
3554 if (this_alternative[i] != NO_REGS)
3555 losers++;
3558 /* Alternative loses if it requires a type of reload not
3559 permitted for this insn. We can always reload SCRATCH
3560 and objects with a REG_UNUSED note. */
3561 if (GET_CODE (operand) != SCRATCH
3562 && modified[i] != RELOAD_READ && no_output_reloads
3563 && ! find_reg_note (insn, REG_UNUSED, operand))
3564 bad = 1;
3565 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3566 && ! const_to_mem)
3567 bad = 1;
3569 /* If we can't reload this value at all, reject this
3570 alternative. Note that we could also lose due to
3571 LIMIT_RELOAD_CLASS, but we don't check that
3572 here. */
3574 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3576 if (targetm.preferred_reload_class (operand,
3577 this_alternative[i])
3578 == NO_REGS)
3579 reject = 600;
3581 if (operand_type[i] == RELOAD_FOR_OUTPUT
3582 && (targetm.preferred_output_reload_class (operand,
3583 this_alternative[i])
3584 == NO_REGS))
3585 reject = 600;
3588 /* We prefer to reload pseudos over reloading other things,
3589 since such reloads may be able to be eliminated later.
3590 If we are reloading a SCRATCH, we won't be generating any
3591 insns, just using a register, so it is also preferred.
3592 So bump REJECT in other cases. Don't do this when we are
3593 forcing a constant into memory and it will then win, since
3594 we don't want a different alternative to match in that
3595 case. */
3596 if (! (REG_P (operand)
3597 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3598 && GET_CODE (operand) != SCRATCH
3599 && ! (const_to_mem && constmemok))
3600 reject += 2;
3602 /* Input reloads can be inherited more often than output
3603 reloads can be removed, so penalize output reloads. */
3604 if (operand_type[i] != RELOAD_FOR_INPUT
3605 && GET_CODE (operand) != SCRATCH)
3606 reject++;
3609 /* If this operand is a pseudo register that didn't get
3610 a hard reg and this alternative accepts some
3611 register, see if the class that we want is a subset
3612 of the preferred class for this register. If not,
3613 but it intersects that class, use the preferred class
3614 instead. If it does not intersect the preferred
3615 class, show that usage of this alternative should be
3616 discouraged; it will be discouraged more still if the
3617 register is `preferred or nothing'. We do this
3618 because it increases the chance of reusing our spill
3619 register in a later insn and avoiding a pair of
3620 memory stores and loads.
3622 Don't bother with this if this alternative will
3623 accept this operand.
3625 Don't do this for a multiword operand, since it is
3626 only a small win and has the risk of requiring more
3627 spill registers, which could cause a large loss.
3629 Don't do this if the preferred class has only one
3630 register because we might otherwise exhaust the
3631 class. */
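/* E.g. if this alternative allows ALL_REGS but the pseudo's preferred
   class is GENERAL_REGS, the reload class is narrowed to GENERAL_REGS
   below; if the two classes merely intersected or were disjoint, REJECT
   would instead be bumped by 2 (by 4 when the register is "preferred
   or nothing").  */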
3633 if (! win && ! did_match
3634 && this_alternative[i] != NO_REGS
3635 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3636 && reg_class_size [(int) preferred_class[i]] > 0
3637 && ! small_register_class_p (preferred_class[i]))
3639 if (! reg_class_subset_p (this_alternative[i],
3640 preferred_class[i]))
3642 /* Since we don't have a way of forming the intersection,
3643 we just do something special if the preferred class
3644 is a subset of the class we have; that's the most
3645 common case anyway. */
3646 if (reg_class_subset_p (preferred_class[i],
3647 this_alternative[i]))
3648 this_alternative[i] = preferred_class[i];
3649 else
3650 reject += (2 + 2 * pref_or_nothing[i]);
3655 /* Now see if any output operands that are marked "earlyclobber"
3656 in this alternative conflict with any input operands
3657 or any memory addresses. */
3659 for (i = 0; i < noperands; i++)
3660 if (this_alternative_earlyclobber[i]
3661 && (this_alternative_win[i] || this_alternative_match_win[i]))
3663 struct decomposition early_data;
3665 early_data = decompose (recog_data.operand[i]);
3667 gcc_assert (modified[i] != RELOAD_READ);
3669 if (this_alternative[i] == NO_REGS)
3671 this_alternative_earlyclobber[i] = 0;
3672 gcc_assert (this_insn_is_asm);
3673 error_for_asm (this_insn,
3674 "%<&%> constraint used with no register class");
3677 for (j = 0; j < noperands; j++)
3678 /* Is this an input operand or a memory ref? */
3679 if ((MEM_P (recog_data.operand[j])
3680 || modified[j] != RELOAD_WRITE)
3681 && j != i
3682 /* Ignore things like match_operator operands. */
3683 && !recog_data.is_operator[j]
3684 /* Don't count an input operand that is constrained to match
3685 the early clobber operand. */
3686 && ! (this_alternative_matches[j] == i
3687 && rtx_equal_p (recog_data.operand[i],
3688 recog_data.operand[j]))
3689 /* Is it altered by storing the earlyclobber operand? */
3690 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3691 early_data))
3693 /* If the output is in a non-empty few-regs class,
3694 it's costly to reload it, so reload the input instead. */
3695 if (small_register_class_p (this_alternative[i])
3696 && (REG_P (recog_data.operand[j])
3697 || GET_CODE (recog_data.operand[j]) == SUBREG))
3699 losers++;
3700 this_alternative_win[j] = 0;
3701 this_alternative_match_win[j] = 0;
3703 else
3704 break;
3706 /* If an earlyclobber operand conflicts with something,
3707 it must be reloaded, so request this and count the cost. */
3708 if (j != noperands)
3710 losers++;
3711 this_alternative_win[i] = 0;
3712 this_alternative_match_win[j] = 0;
3713 for (j = 0; j < noperands; j++)
3714 if (this_alternative_matches[j] == i
3715 && this_alternative_match_win[j])
3717 this_alternative_win[j] = 0;
3718 this_alternative_match_win[j] = 0;
3719 losers++;
3724 /* If one alternative accepts all the operands, no reload required,
3725 choose that alternative; don't consider the remaining ones. */
3726 if (losers == 0)
3728 /* Unswap these so that they are never swapped at `finish'. */
3729 if (swapped)
3731 recog_data.operand[commutative] = substed_operand[commutative];
3732 recog_data.operand[commutative + 1]
3733 = substed_operand[commutative + 1];
3735 for (i = 0; i < noperands; i++)
3737 goal_alternative_win[i] = this_alternative_win[i];
3738 goal_alternative_match_win[i] = this_alternative_match_win[i];
3739 goal_alternative[i] = this_alternative[i];
3740 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3741 goal_alternative_matches[i] = this_alternative_matches[i];
3742 goal_alternative_earlyclobber[i]
3743 = this_alternative_earlyclobber[i];
3745 goal_alternative_number = this_alternative_number;
3746 goal_alternative_swapped = swapped;
3747 goal_earlyclobber = this_earlyclobber;
3748 goto finish;
3751 /* REJECT, set by the ! and ? constraint characters and when a register
3752 would be reloaded into a non-preferred class, discourages the use of
3753 this alternative for a reload goal. REJECT is incremented by six
3754 for each ? and two for each non-preferred class. */
3755 losers = losers * 6 + reject;
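/* Under the scoring above, an alternative needing two reloads and
   carrying one '?' (reject == 6) scores 2*6 + 6 == 18, the same as an
   alternative needing three reloads and no '?'; so each '?' costs as
   much as one extra reload.  */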
3757 /* If this alternative can be made to work by reloading,
3758 and it needs less reloading than the others checked so far,
3759 record it as the chosen goal for reloading. */
3760 if (! bad)
3762 if (best > losers)
3764 for (i = 0; i < noperands; i++)
3766 goal_alternative[i] = this_alternative[i];
3767 goal_alternative_win[i] = this_alternative_win[i];
3768 goal_alternative_match_win[i]
3769 = this_alternative_match_win[i];
3770 goal_alternative_offmemok[i]
3771 = this_alternative_offmemok[i];
3772 goal_alternative_matches[i] = this_alternative_matches[i];
3773 goal_alternative_earlyclobber[i]
3774 = this_alternative_earlyclobber[i];
3776 goal_alternative_swapped = swapped;
3777 best = losers;
3778 goal_alternative_number = this_alternative_number;
3779 goal_earlyclobber = this_earlyclobber;
3783 if (swapped)
3785 enum reg_class tclass;
3786 int t;
3788 /* If the commutative operands have been swapped, swap
3789 them back in order to check the next alternative. */
3790 recog_data.operand[commutative] = substed_operand[commutative];
3791 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3792 /* Unswap the duplicates too. */
3793 for (i = 0; i < recog_data.n_dups; i++)
3794 if (recog_data.dup_num[i] == commutative
3795 || recog_data.dup_num[i] == commutative + 1)
3796 *recog_data.dup_loc[i]
3797 = recog_data.operand[(int) recog_data.dup_num[i]];
3799 /* Unswap the operand related information as well. */
3800 tclass = preferred_class[commutative];
3801 preferred_class[commutative] = preferred_class[commutative + 1];
3802 preferred_class[commutative + 1] = tclass;
3804 t = pref_or_nothing[commutative];
3805 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3806 pref_or_nothing[commutative + 1] = t;
3808 t = address_reloaded[commutative];
3809 address_reloaded[commutative] = address_reloaded[commutative + 1];
3810 address_reloaded[commutative + 1] = t;
3815 /* The operands don't meet the constraints.
3816 goal_alternative describes the alternative
3817 that we could reach by reloading the fewest operands.
3818 Reload so as to fit it. */
3820 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3822 /* No alternative works with reloads?? */
3823 if (insn_code_number >= 0)
3824 fatal_insn ("unable to generate reloads for:", insn);
3825 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3826 /* Avoid further trouble with this insn. */
3827 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3828 n_reloads = 0;
3829 return 0;
3832 /* Jump to `finish' from above if all operands are valid already.
3833 In that case, goal_alternative_win is all 1. */
3834 finish:
3836 /* Right now, for any pair of operands I and J that are required to match,
3837 with I < J,
3838 goal_alternative_matches[J] is I.
3839 Set up goal_alternative_matched as the inverse function:
3840 goal_alternative_matched[I] = J. */
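/* For example, if operand 2 had to match operand 0, then
   goal_alternative_matches[2] == 0 and the loops below record
   goal_alternative_matched[0] = 2 (but only while operand 2 itself
   still needs a reload).  */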
3842 for (i = 0; i < noperands; i++)
3843 goal_alternative_matched[i] = -1;
3845 for (i = 0; i < noperands; i++)
3846 if (! goal_alternative_win[i]
3847 && goal_alternative_matches[i] >= 0)
3848 goal_alternative_matched[goal_alternative_matches[i]] = i;
3850 for (i = 0; i < noperands; i++)
3851 goal_alternative_win[i] |= goal_alternative_match_win[i];
3853 /* If the best alternative is with operands 1 and 2 swapped,
3854 consider them swapped before reporting the reloads. Update the
3855 operand numbers of any reloads already pushed. */
3857 if (goal_alternative_swapped)
3859 rtx tem;
3861 tem = substed_operand[commutative];
3862 substed_operand[commutative] = substed_operand[commutative + 1];
3863 substed_operand[commutative + 1] = tem;
3864 tem = recog_data.operand[commutative];
3865 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3866 recog_data.operand[commutative + 1] = tem;
3867 tem = *recog_data.operand_loc[commutative];
3868 *recog_data.operand_loc[commutative]
3869 = *recog_data.operand_loc[commutative + 1];
3870 *recog_data.operand_loc[commutative + 1] = tem;
3872 for (i = 0; i < n_reloads; i++)
3874 if (rld[i].opnum == commutative)
3875 rld[i].opnum = commutative + 1;
3876 else if (rld[i].opnum == commutative + 1)
3877 rld[i].opnum = commutative;
3881 for (i = 0; i < noperands; i++)
3883 operand_reloadnum[i] = -1;
3885 /* If this is an earlyclobber operand, we need to widen the scope.
3886 The reload must remain valid from the start of the insn being
3887 reloaded until after the operand is stored into its destination.
3888 We approximate this with RELOAD_OTHER even though we know that we
3889 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3891 One special case that is worth checking is when we have an
3892 output that is earlyclobber but isn't used past the insn (typically
3893 a SCRATCH). In this case, we need only keep the reload live
3894 through the insn itself, but not for any of our input or output
3895 reloads.
3896 But we must not accidentally narrow the scope of an existing
3897 RELOAD_OTHER reload - leave these alone.
3899 In any case, any reloads needed to address this operand can remain
3900 however they were previously categorized. */
3902 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3903 operand_type[i]
3904 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3905 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3908 /* Any constants that aren't allowed and can't be reloaded
3909 into registers are here changed into memory references. */
3910 for (i = 0; i < noperands; i++)
3911 if (! goal_alternative_win[i])
3913 rtx op = recog_data.operand[i];
3914 rtx subreg = NULL_RTX;
3915 rtx plus = NULL_RTX;
3916 enum machine_mode mode = operand_mode[i];
3918 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3919 push_reload so we have to let them pass here. */
3920 if (GET_CODE (op) == SUBREG)
3922 subreg = op;
3923 op = SUBREG_REG (op);
3924 mode = GET_MODE (op);
3927 if (GET_CODE (op) == PLUS)
3929 plus = op;
3930 op = XEXP (op, 1);
3933 if (CONST_POOL_OK_P (mode, op)
3934 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3935 == NO_REGS)
3936 || no_input_reloads))
3938 int this_address_reloaded;
3939 rtx tem = force_const_mem (mode, op);
3941 /* If we stripped a SUBREG or a PLUS above add it back. */
3942 if (plus != NULL_RTX)
3943 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3945 if (subreg != NULL_RTX)
3946 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3948 this_address_reloaded = 0;
3949 substed_operand[i] = recog_data.operand[i]
3950 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3951 0, insn, &this_address_reloaded);
3953 /* If the alternative accepts constant pool refs directly
3954 there will be no reload needed at all. */
3955 if (plus == NULL_RTX
3956 && subreg == NULL_RTX
3957 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3958 ? substed_operand[i]
3959 : NULL,
3960 recog_data.constraints[i],
3961 goal_alternative_number))
3962 goal_alternative_win[i] = 1;
3966 /* Record the values of the earlyclobber operands for the caller. */
3967 if (goal_earlyclobber)
3968 for (i = 0; i < noperands; i++)
3969 if (goal_alternative_earlyclobber[i])
3970 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3972 /* Now record reloads for all the operands that need them. */
3973 for (i = 0; i < noperands; i++)
3974 if (! goal_alternative_win[i])
3976 /* Operands that match previous ones have already been handled. */
3977 if (goal_alternative_matches[i] >= 0)
3979 /* Handle an operand with a nonoffsettable address
3980 appearing where an offsettable address will do
3981 by reloading the address into a base register.
3983 ??? We can also do this when the operand is a register and
3984 reg_equiv_mem is not offsettable, but this is a bit tricky,
3985 so we don't bother with it. It may not be worth doing. */
3986 else if (goal_alternative_matched[i] == -1
3987 && goal_alternative_offmemok[i]
3988 && MEM_P (recog_data.operand[i]))
3990 /* If the address to be reloaded is a VOIDmode constant,
3991 use the default address mode as mode of the reload register,
3992 as would have been done by find_reloads_address. */
3993 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3994 enum machine_mode address_mode;
3996 address_mode = get_address_mode (recog_data.operand[i]);
3997 operand_reloadnum[i]
3998 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3999 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4000 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4001 address_mode,
4002 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4003 rld[operand_reloadnum[i]].inc
4004 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4006 /* If this operand is an output, we will have made any
4007 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4008 now we are treating part of the operand as an input, so
4009 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4011 if (modified[i] == RELOAD_WRITE)
4013 for (j = 0; j < n_reloads; j++)
4015 if (rld[j].opnum == i)
4017 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4018 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4019 else if (rld[j].when_needed
4020 == RELOAD_FOR_OUTADDR_ADDRESS)
4021 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4026 else if (goal_alternative_matched[i] == -1)
4028 operand_reloadnum[i]
4029 = push_reload ((modified[i] != RELOAD_WRITE
4030 ? recog_data.operand[i] : 0),
4031 (modified[i] != RELOAD_READ
4032 ? recog_data.operand[i] : 0),
4033 (modified[i] != RELOAD_WRITE
4034 ? recog_data.operand_loc[i] : 0),
4035 (modified[i] != RELOAD_READ
4036 ? recog_data.operand_loc[i] : 0),
4037 (enum reg_class) goal_alternative[i],
4038 (modified[i] == RELOAD_WRITE
4039 ? VOIDmode : operand_mode[i]),
4040 (modified[i] == RELOAD_READ
4041 ? VOIDmode : operand_mode[i]),
4042 (insn_code_number < 0 ? 0
4043 : insn_data[insn_code_number].operand[i].strict_low),
4044 0, i, operand_type[i]);
4046 /* In a matching pair of operands, one must be input only
4047 and the other must be output only.
4048 Pass the input operand as IN and the other as OUT. */
4049 else if (modified[i] == RELOAD_READ
4050 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4052 operand_reloadnum[i]
4053 = push_reload (recog_data.operand[i],
4054 recog_data.operand[goal_alternative_matched[i]],
4055 recog_data.operand_loc[i],
4056 recog_data.operand_loc[goal_alternative_matched[i]],
4057 (enum reg_class) goal_alternative[i],
4058 operand_mode[i],
4059 operand_mode[goal_alternative_matched[i]],
4060 0, 0, i, RELOAD_OTHER);
4061 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4063 else if (modified[i] == RELOAD_WRITE
4064 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4066 operand_reloadnum[goal_alternative_matched[i]]
4067 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4068 recog_data.operand[i],
4069 recog_data.operand_loc[goal_alternative_matched[i]],
4070 recog_data.operand_loc[i],
4071 (enum reg_class) goal_alternative[i],
4072 operand_mode[goal_alternative_matched[i]],
4073 operand_mode[i],
4074 0, 0, i, RELOAD_OTHER);
4075 operand_reloadnum[i] = output_reloadnum;
4077 else
4079 gcc_assert (insn_code_number < 0);
4080 error_for_asm (insn, "inconsistent operand constraints "
4081 "in an %<asm%>");
4082 /* Avoid further trouble with this insn. */
4083 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4084 n_reloads = 0;
4085 return 0;
4088 else if (goal_alternative_matched[i] < 0
4089 && goal_alternative_matches[i] < 0
4090 && address_operand_reloaded[i] != 1
4091 && optimize)
4093 /* For each non-matching operand that's a MEM or a pseudo-register
4094 that didn't get a hard register, make an optional reload.
4095 This may get done even if the insn needs no reloads otherwise. */
4097 rtx operand = recog_data.operand[i];
4099 while (GET_CODE (operand) == SUBREG)
4100 operand = SUBREG_REG (operand);
4101 if ((MEM_P (operand)
4102 || (REG_P (operand)
4103 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4104 /* If this is only for an output, the optional reload would not
4105 actually cause us to use a register now, just note that
4106 something is stored here. */
4107 && (goal_alternative[i] != NO_REGS
4108 || modified[i] == RELOAD_WRITE)
4109 && ! no_input_reloads
4110 /* An optional output reload might allow INSN to be deleted later.
4111 We mustn't make in-out reloads on insns that are not permitted
4112 output reloads.
4113 If this is an asm, we can't delete it; we must not even call
4114 push_reload for an optional output reload in this case,
4115 because we can't be sure that the constraint allows a register,
4116 and push_reload verifies the constraints for asms. */
4117 && (modified[i] == RELOAD_READ
4118 || (! no_output_reloads && ! this_insn_is_asm)))
4119 operand_reloadnum[i]
4120 = push_reload ((modified[i] != RELOAD_WRITE
4121 ? recog_data.operand[i] : 0),
4122 (modified[i] != RELOAD_READ
4123 ? recog_data.operand[i] : 0),
4124 (modified[i] != RELOAD_WRITE
4125 ? recog_data.operand_loc[i] : 0),
4126 (modified[i] != RELOAD_READ
4127 ? recog_data.operand_loc[i] : 0),
4128 (enum reg_class) goal_alternative[i],
4129 (modified[i] == RELOAD_WRITE
4130 ? VOIDmode : operand_mode[i]),
4131 (modified[i] == RELOAD_READ
4132 ? VOIDmode : operand_mode[i]),
4133 (insn_code_number < 0 ? 0
4134 : insn_data[insn_code_number].operand[i].strict_low),
4135 1, i, operand_type[i]);
4136 /* If a memory reference remains (either as a MEM or a pseudo that
4137 did not get a hard register), yet we can't make an optional
4138 reload, check if this is actually a pseudo register reference;
4139 we then need to emit a USE and/or a CLOBBER so that reload
4140 inheritance will do the right thing. */
4141 else if (replace
4142 && (MEM_P (operand)
4143 || (REG_P (operand)
4144 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4145 && reg_renumber [REGNO (operand)] < 0)))
4147 operand = *recog_data.operand_loc[i];
4149 while (GET_CODE (operand) == SUBREG)
4150 operand = SUBREG_REG (operand);
4151 if (REG_P (operand))
4153 if (modified[i] != RELOAD_WRITE)
4154 /* We mark the USE with QImode so that we recognize
4155 it as one that can be safely deleted at the end
4156 of reload. */
4157 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4158 insn), QImode);
4159 if (modified[i] != RELOAD_READ)
4160 emit_insn_after (gen_clobber (operand), insn);
4164 else if (goal_alternative_matches[i] >= 0
4165 && goal_alternative_win[goal_alternative_matches[i]]
4166 && modified[i] == RELOAD_READ
4167 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4168 && ! no_input_reloads && ! no_output_reloads
4169 && optimize)
4171 /* Similarly, make an optional reload for a pair of matching
4172 objects that are in MEM or a pseudo that didn't get a hard reg. */
4174 rtx operand = recog_data.operand[i];
4176 while (GET_CODE (operand) == SUBREG)
4177 operand = SUBREG_REG (operand);
4178 if ((MEM_P (operand)
4179 || (REG_P (operand)
4180 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4181 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4182 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4183 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4184 recog_data.operand[i],
4185 recog_data.operand_loc[goal_alternative_matches[i]],
4186 recog_data.operand_loc[i],
4187 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4188 operand_mode[goal_alternative_matches[i]],
4189 operand_mode[i],
4190 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4193 /* Perform whatever substitutions on the operands we are supposed
4194 to make due to commutativity or replacement of registers
4195 with equivalent constants or memory slots. */
4197 for (i = 0; i < noperands; i++)
4199 /* We only do this on the last pass through reload, because it is
4200 possible for some data (like reg_equiv_address) to be changed during
4201 later passes. Moreover, we lose the opportunity to get a useful
4202 reload_{in,out}_reg when we do these replacements. */
4204 if (replace)
4206 rtx substitution = substed_operand[i];
4208 *recog_data.operand_loc[i] = substitution;
4210 /* If we're replacing an operand with a LABEL_REF, we need to
4211 make sure that there's a REG_LABEL_OPERAND note attached to
4212 this instruction. */
4213 if (GET_CODE (substitution) == LABEL_REF
4214 && !find_reg_note (insn, REG_LABEL_OPERAND,
4215 XEXP (substitution, 0))
4216 /* For a JUMP_P, if it was a branch target it must have
4217 already been recorded as such. */
4218 && (!JUMP_P (insn)
4219 || !label_is_jump_target_p (XEXP (substitution, 0),
4220 insn)))
4222 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4223 if (LABEL_P (XEXP (substitution, 0)))
4224 ++LABEL_NUSES (XEXP (substitution, 0));
4228 else
4229 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4232 /* If this insn pattern contains any MATCH_DUP's, make sure that
4233 they will be substituted if the operands they match are substituted.
4234 Also do now any substitutions we already did on the operands.
4236 Don't do this if we aren't making replacements because we might be
4237 propagating things allocated by frame pointer elimination into places
4238 it doesn't expect. */
4240 if (insn_code_number >= 0 && replace)
4241 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4243 int opno = recog_data.dup_num[i];
4244 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4245 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4248 #if 0
4249 /* This loses because reloading of prior insns can invalidate the equivalence
4250 (or at least find_equiv_reg isn't smart enough to find it any more),
4251 causing this insn to need more reload regs than it needed before.
4252 It may be too late to make the reload regs available.
4253 Now this optimization is done safely in choose_reload_regs. */
4255 /* For each reload of a reg into some other class of reg,
4256 search for an existing equivalent reg (same value now) in the right class.
4257 We can use it as long as we don't need to change its contents. */
4258 for (i = 0; i < n_reloads; i++)
4259 if (rld[i].reg_rtx == 0
4260 && rld[i].in != 0
4261 && REG_P (rld[i].in)
4262 && rld[i].out == 0)
4264 rld[i].reg_rtx
4265 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4266 static_reload_reg_p, 0, rld[i].inmode);
4267 /* Prevent generation of insn to load the value
4268 because the one we found already has the value. */
4269 if (rld[i].reg_rtx)
4270 rld[i].in = rld[i].reg_rtx;
4272 #endif
4274 /* If we detected an error and replaced the asm instruction by a USE, forget
4275 about the reloads. */
4276 if (GET_CODE (PATTERN (insn)) == USE
4277 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4278 n_reloads = 0;
4280 /* Perhaps an output reload can be combined with another
4281 to reduce needs by one. */
4282 if (!goal_earlyclobber)
4283 combine_reloads ();
4285 /* If we have a pair of reloads for parts of an address, they are reloading
4286 the same object, the operands themselves were not reloaded, and they
4287 are for two operands that are supposed to match, merge the reloads and
4288 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4290 for (i = 0; i < n_reloads; i++)
4292 int k;
4294 for (j = i + 1; j < n_reloads; j++)
4295 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4296 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4297 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4300 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4301 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4302 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4303 && rtx_equal_p (rld[i].in, rld[j].in)
4304 && (operand_reloadnum[rld[i].opnum] < 0
4305 || rld[operand_reloadnum[rld[i].opnum]].optional)
4306 && (operand_reloadnum[rld[j].opnum] < 0
4307 || rld[operand_reloadnum[rld[j].opnum]].optional)
4308 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4309 || (goal_alternative_matches[rld[j].opnum]
4310 == rld[i].opnum)))
4312 for (k = 0; k < n_replacements; k++)
4313 if (replacements[k].what == j)
4314 replacements[k].what = i;
4316 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4317 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4318 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4319 else
4320 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4321 rld[j].in = 0;
4325 /* Scan all the reloads and update their type.
4326 If a reload is for the address of an operand and we didn't reload
4327 that operand, change the type. Similarly, change the operand number
4328 of a reload when two operands match. If a reload is optional, treat it
4329 as though the operand isn't reloaded.
4331 ??? This latter case is somewhat odd because if we do the optional
4332 reload, it means the object is hanging around. Thus we need only
4333 do the address reload if the optional reload was NOT done.
4335 Change secondary reloads to be the address type of their operand, not
4336 the normal type.
4338 If an operand's reload is now RELOAD_OTHER, change any
4339 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4340 RELOAD_FOR_OTHER_ADDRESS. */
4342 for (i = 0; i < n_reloads; i++)
4344 if (rld[i].secondary_p
4345 && rld[i].when_needed == operand_type[rld[i].opnum])
4346 rld[i].when_needed = address_type[rld[i].opnum];
4348 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4349 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4350 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4351 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4352 && (operand_reloadnum[rld[i].opnum] < 0
4353 || rld[operand_reloadnum[rld[i].opnum]].optional))
4355 /* If we have a secondary reload to go along with this reload,
4356 change its type to RELOAD_FOR_OPADDR_ADDR. */
4358 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4359 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4360 && rld[i].secondary_in_reload != -1)
4362 int secondary_in_reload = rld[i].secondary_in_reload;
4364 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4366 /* If there's a tertiary reload we have to change it also. */
4367 if (secondary_in_reload > 0
4368 && rld[secondary_in_reload].secondary_in_reload != -1)
4369 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4370 = RELOAD_FOR_OPADDR_ADDR;
4373 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4374 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4375 && rld[i].secondary_out_reload != -1)
4377 int secondary_out_reload = rld[i].secondary_out_reload;
4379 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4381 /* If there's a tertiary reload we have to change it also. */
4382 if (secondary_out_reload
4383 && rld[secondary_out_reload].secondary_out_reload != -1)
4384 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4385 = RELOAD_FOR_OPADDR_ADDR;
4388 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4389 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4390 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4391 else
4392 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4395 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4396 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4397 && operand_reloadnum[rld[i].opnum] >= 0
4398 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4399 == RELOAD_OTHER))
4400 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4402 if (goal_alternative_matches[rld[i].opnum] >= 0)
4403 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4406 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4407 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4408 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4410 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4411 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4412 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4413 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4414 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4415 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4416 This is complicated by the fact that a single operand can have more
4417 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4418 choose_reload_regs without affecting code quality, and cases that
4419 actually fail are extremely rare, so it turns out to be better to fix
4420 the problem here by not generating cases that choose_reload_regs will
4421 fail for. */
4422 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4423 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4424 a single operand.
4425 We can reduce the register pressure by exploiting that a
4426 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4427 does not conflict with any of them, if it is only used for the first of
4428 the RELOAD_FOR_X_ADDRESS reloads. */
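/* Illustrative case: if operand 1 ends up with two
   RELOAD_FOR_INPUT_ADDRESS reloads, a RELOAD_FOR_INPADDR_ADDRESS reload
   feeding only the first of them may keep its type; one feeding the
   second must be converted to RELOAD_FOR_INPUT_ADDRESS, which is what
   the code below does.  */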
4430 int first_op_addr_num = -2;
4431 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4432 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4433 int need_change = 0;
4434 /* We use first_op_addr_num and the contents of the above arrays
4435 first as flags - -2 means no instance encountered, -1 means exactly
4436 one instance encountered.
4437 If more than one instance has been encountered, we store the reload
4438 number of the first reload of the kind in question; reload numbers
4439 are known to be non-negative. */
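/* Worked example of the counting trick: with RELOAD_FOR_INPUT_ADDRESS
   reloads 5, 3 and 1 all for operand 0, the loop below (which runs from
   high reload numbers down to low) takes first_inpaddr_num[0] from -2 to
   -1 at reload 5, then records 3, then records 1, so it finishes holding
   the first such reload and need_change is set.  */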
4440 for (i = 0; i < noperands; i++)
4441 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4442 for (i = n_reloads - 1; i >= 0; i--)
4444 switch (rld[i].when_needed)
4446 case RELOAD_FOR_OPERAND_ADDRESS:
4447 if (++first_op_addr_num >= 0)
4449 first_op_addr_num = i;
4450 need_change = 1;
4452 break;
4453 case RELOAD_FOR_INPUT_ADDRESS:
4454 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4456 first_inpaddr_num[rld[i].opnum] = i;
4457 need_change = 1;
4459 break;
4460 case RELOAD_FOR_OUTPUT_ADDRESS:
4461 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4463 first_outpaddr_num[rld[i].opnum] = i;
4464 need_change = 1;
4466 break;
4467 default:
4468 break;
4472 if (need_change)
4474 for (i = 0; i < n_reloads; i++)
4476 int first_num;
4477 enum reload_type type;
4479 switch (rld[i].when_needed)
4481 case RELOAD_FOR_OPADDR_ADDR:
4482 first_num = first_op_addr_num;
4483 type = RELOAD_FOR_OPERAND_ADDRESS;
4484 break;
4485 case RELOAD_FOR_INPADDR_ADDRESS:
4486 first_num = first_inpaddr_num[rld[i].opnum];
4487 type = RELOAD_FOR_INPUT_ADDRESS;
4488 break;
4489 case RELOAD_FOR_OUTADDR_ADDRESS:
4490 first_num = first_outpaddr_num[rld[i].opnum];
4491 type = RELOAD_FOR_OUTPUT_ADDRESS;
4492 break;
4493 default:
4494 continue;
4496 if (first_num < 0)
4497 continue;
4498 else if (i > first_num)
4499 rld[i].when_needed = type;
4500 else
4502 /* Check if the only TYPE reload that uses reload I is
4503 reload FIRST_NUM. */
4504 for (j = n_reloads - 1; j > first_num; j--)
4506 if (rld[j].when_needed == type
4507 && (rld[i].secondary_p
4508 ? rld[j].secondary_in_reload == i
4509 : reg_mentioned_p (rld[i].in, rld[j].in)))
4511 rld[i].when_needed = type;
4512 break;
4520 /* See if we have any reloads that are now allowed to be merged
4521 because we've changed when the reload is needed to
4522 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4523 check for the most common cases. */
4525 for (i = 0; i < n_reloads; i++)
4526 if (rld[i].in != 0 && rld[i].out == 0
4527 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4528 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4529 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4530 for (j = 0; j < n_reloads; j++)
4531 if (i != j && rld[j].in != 0 && rld[j].out == 0
4532 && rld[j].when_needed == rld[i].when_needed
4533 && MATCHES (rld[i].in, rld[j].in)
4534 && rld[i].rclass == rld[j].rclass
4535 && !rld[i].nocombine && !rld[j].nocombine
4536 && rld[i].reg_rtx == rld[j].reg_rtx)
4538 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4539 transfer_replacements (i, j);
4540 rld[j].in = 0;
4543 #ifdef HAVE_cc0
4544 /* If we made any reloads for addresses, see if they violate a
4545 "no input reloads" requirement for this insn. But loads that we
4546 do after the insn (such as for output addresses) are fine. */
4547 if (no_input_reloads)
4548 for (i = 0; i < n_reloads; i++)
4549 gcc_assert (rld[i].in == 0
4550 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4551 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4552 #endif
4554 /* Compute reload_mode and reload_nregs. */
4555 for (i = 0; i < n_reloads; i++)
4557 rld[i].mode
4558 = (rld[i].inmode == VOIDmode
4559 || (GET_MODE_SIZE (rld[i].outmode)
4560 > GET_MODE_SIZE (rld[i].inmode)))
4561 ? rld[i].outmode : rld[i].inmode;
4563 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
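/* E.g. a reload with SImode input and DImode output gets rld[i].mode ==
   DImode (the wider of the two) from the computation above, and nregs is
   the number of hard registers of the reload's class needed to hold a
   DImode value.  */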
4566 /* Special case: a simple move with an input reload and a
4567 destination of a hard reg; if the hard reg is ok, use it. */
4568 for (i = 0; i < n_reloads; i++)
4569 if (rld[i].when_needed == RELOAD_FOR_INPUT
4570 && GET_CODE (PATTERN (insn)) == SET
4571 && REG_P (SET_DEST (PATTERN (insn)))
4572 && (SET_SRC (PATTERN (insn)) == rld[i].in
4573 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4574 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4576 rtx dest = SET_DEST (PATTERN (insn));
4577 unsigned int regno = REGNO (dest);
4579 if (regno < FIRST_PSEUDO_REGISTER
4580 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4581 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4583 int nr = hard_regno_nregs[regno][rld[i].mode];
4584 int ok = 1, nri;
4586 for (nri = 1; nri < nr; nri ++)
4587 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4588 ok = 0;
4590 if (ok)
4591 rld[i].reg_rtx = dest;
4595 return retval;
4598 /* Return true if alternative number ALTNUM in constraint-string
4599 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4600 MEM gives the reference if it didn't need any reloads, otherwise it
4601 is null. */
4603 static bool
4604 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4605 const char *constraint, int altnum)
4607 int c;
4609 /* Skip alternatives before the one requested. */
4610 while (altnum > 0)
4612 while (*constraint++ != ',')
4614 altnum--;
4616 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4617 If one of them is present, this alternative accepts the result of
4618 passing a constant-pool reference through find_reloads_toplev.
4620 The same is true of extra memory constraints if the address
4621 was reloaded into a register. However, the target may elect
4622 to disallow the original constant address, forcing it to be
4623 reloaded into a register instead. */
4624 for (; (c = *constraint) && c != ',' && c != '#';
4625 constraint += CONSTRAINT_LEN (c, constraint))
4627 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4628 return true;
4629 #ifdef EXTRA_CONSTRAINT_STR
4630 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4631 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4632 return true;
4633 #endif
4635 return false;
4638 /* Scan X for memory references and scan the addresses for reloading.
4639 Also checks for references to "constant" regs that we want to eliminate
4640 and replaces them with the values they stand for.
4641 We may alter X destructively if it contains a reference to such.
4642 If X is just a constant reg, we return the equivalent value
4643 instead of X.
4645 IND_LEVELS says how many levels of indirect addressing this machine
4646 supports.
4648 OPNUM and TYPE identify the purpose of the reload.
4650 IS_SET_DEST is true if X is the destination of a SET, which is not
4651 appropriate to be replaced by a constant.
4653 INSN, if nonzero, is the insn in which we do the reload. It is used
4654 to determine if we may generate output reloads, and where to put USEs
4655 for pseudos that we have to replace with stack slots.
4657 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4658 result of find_reloads_address. */
4660 static rtx
4661 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4662 int ind_levels, int is_set_dest, rtx insn,
4663 int *address_reloaded)
4665 RTX_CODE code = GET_CODE (x);
4667 const char *fmt = GET_RTX_FORMAT (code);
4668 int i;
4669 int copied;
4671 if (code == REG)
4673 /* This code is duplicated for speed in find_reloads. */
4674 int regno = REGNO (x);
4675 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4676 x = reg_equiv_constant (regno);
4677 #if 0
4678 /* This creates (subreg (mem...)) which would cause an unnecessary
4679 reload of the mem. */
4680 else if (reg_equiv_mem (regno) != 0)
4681 x = reg_equiv_mem (regno);
4682 #endif
4683 else if (reg_equiv_memory_loc (regno)
4684 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4686 rtx mem = make_memloc (x, regno);
4687 if (reg_equiv_address (regno)
4688 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4690 /* If this is not a toplevel operand, find_reloads doesn't see
4691 this substitution. We have to emit a USE of the pseudo so
4692 that delete_output_reload can see it. */
4693 if (replace_reloads && recog_data.operand[opnum] != x)
4694 /* We mark the USE with QImode so that we recognize it
4695 as one that can be safely deleted at the end of
4696 reload. */
4697 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4698 QImode);
4699 x = mem;
4700 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4701 opnum, type, ind_levels, insn);
4702 if (!rtx_equal_p (x, mem))
4703 push_reg_equiv_alt_mem (regno, x);
4704 if (address_reloaded)
4705 *address_reloaded = i;
4708 return x;
4710 if (code == MEM)
4712 rtx tem = x;
4714 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4715 opnum, type, ind_levels, insn);
4716 if (address_reloaded)
4717 *address_reloaded = i;
4719 return tem;
4722 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4724 /* Check for SUBREG containing a REG that's equivalent to a
4725 constant. If the constant has a known value, truncate it
4726 right now. Similarly if we are extracting a single-word of a
4727 multi-word constant. If the constant is symbolic, allow it
4728 to be substituted normally. push_reload will strip the
4729 subreg later. The constant must not be VOIDmode, because we
4730 will lose the mode of the register (this should never happen
4731 because one of the cases above should handle it). */
4733 int regno = REGNO (SUBREG_REG (x));
4734 rtx tem;
4736 if (regno >= FIRST_PSEUDO_REGISTER
4737 && reg_renumber[regno] < 0
4738 && reg_equiv_constant (regno) != 0)
4740 tem =
4741 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4742 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4743 gcc_assert (tem);
4744 if (CONSTANT_P (tem)
4745 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4747 tem = force_const_mem (GET_MODE (x), tem);
4748 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4749 &XEXP (tem, 0), opnum, type,
4750 ind_levels, insn);
4751 if (address_reloaded)
4752 *address_reloaded = i;
4754 return tem;
4757 /* If the subreg contains a reg that will be converted to a mem,
4758 convert the subreg to a narrower memref now.
4759 Otherwise, we would get (subreg (mem ...) ...),
4760 which would force reload of the mem.
4762 We also need to do this if there is an equivalent MEM that is
4763 not offsettable. In that case, alter_subreg would produce an
4764 invalid address on big-endian machines.
4766 For machines that extend byte loads, we must not reload using
4767 a wider mode if we have a paradoxical SUBREG. find_reloads will
4768 force a reload in that case. So we should not do anything here. */
4770 if (regno >= FIRST_PSEUDO_REGISTER
4771 #ifdef LOAD_EXTEND_OP
4772 && !paradoxical_subreg_p (x)
4773 #endif
4774 && (reg_equiv_address (regno) != 0
4775 || (reg_equiv_mem (regno) != 0
4776 && (! strict_memory_address_addr_space_p
4777 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4778 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4779 || ! offsettable_memref_p (reg_equiv_mem (regno))
4780 || num_not_at_initial_offset))))
4781 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4782 insn, address_reloaded);
4785 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4787 if (fmt[i] == 'e')
4789 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4790 ind_levels, is_set_dest, insn,
4791 address_reloaded);
4792 /* If we have replaced a reg with its equivalent memory loc -
4793 that can still be handled here e.g. if it's in a paradoxical
4794 subreg - we must make the change in a copy, rather than using
4795 a destructive change. This way, find_reloads can still elect
4796 not to do the change. */
4797 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4799 x = shallow_copy_rtx (x);
4800 copied = 1;
4802 XEXP (x, i) = new_part;
4805 return x;
4808 /* Return a mem ref for the memory equivalent of reg REGNO.
4809 This mem ref is not shared with anything. */
4811 static rtx
4812 make_memloc (rtx ad, int regno)
4814 /* We must rerun eliminate_regs, in case the elimination
4815 offsets have changed. */
4816 rtx tem
4817 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4818 0);
4820 /* If TEM might contain a pseudo, we must copy it to avoid
4821 modifying it when we do the substitution for the reload. */
4822 if (rtx_varies_p (tem, 0))
4823 tem = copy_rtx (tem);
4825 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4826 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4828 /* Copy the result if it's still the same as the equivalence, to avoid
4829 modifying it when we do the substitution for the reload. */
4830 if (tem == reg_equiv_memory_loc (regno))
4831 tem = copy_rtx (tem);
4832 return tem;
4835 /* Returns true if AD could be turned into a valid memory reference
4836 to mode MODE in address space AS by reloading the part pointed to
4837 by PART into a register. */
4839 static int
4840 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4841 addr_space_t as, rtx *part)
4843 int retv;
4844 rtx tem = *part;
4845 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4847 *part = reg;
4848 retv = memory_address_addr_space_p (mode, ad, as);
4849 *part = tem;
4851 return retv;
4854 /* Record all reloads needed for handling memory address AD
4855 which appears in *LOC in a memory reference to mode MODE
4856 which itself is found in location *MEMREFLOC.
4857 Note that we take shortcuts assuming that no multi-reg machine mode
4858 occurs as part of an address.
4860 OPNUM and TYPE specify the purpose of this reload.
4862 IND_LEVELS says how many levels of indirect addressing this machine
4863 supports.
4865 INSN, if nonzero, is the insn in which we do the reload. It is used
4866 to determine if we may generate output reloads, and where to put USEs
4867 for pseudos that we have to replace with stack slots.
4869 Value is one if this address is reloaded or replaced as a whole; it is
4870 zero if the top level of this address was not reloaded or replaced, and
4871 it is -1 if it may or may not have been reloaded or replaced.
4873 Note that there is no verification that the address will be valid after
4874 this routine does its work. Instead, we rely on the fact that the address
4875 was valid when reload started. So we need only undo things that reload
4876 could have broken. These are wrong register types, pseudos not allocated
4877 to a hard register, and frame pointer elimination. */
4879 static int
4880 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4881 rtx *loc, int opnum, enum reload_type type,
4882 int ind_levels, rtx insn)
4884 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4885 : ADDR_SPACE_GENERIC;
4886 int regno;
4887 int removed_and = 0;
4888 int op_index;
4889 rtx tem;
4891 /* If the address is a register, see if it is a legitimate address and
4892 reload if not. We first handle the cases where we need not reload
4893 or where we must reload in a non-standard way. */
4895 if (REG_P (ad))
4897 regno = REGNO (ad);
4899 if (reg_equiv_constant (regno) != 0)
4901 find_reloads_address_part (reg_equiv_constant (regno), loc,
4902 base_reg_class (mode, as, MEM, SCRATCH),
4903 GET_MODE (ad), opnum, type, ind_levels);
4904 return 1;
4907 tem = reg_equiv_memory_loc (regno);
4908 if (tem != 0)
4910 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4912 tem = make_memloc (ad, regno);
4913 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4914 XEXP (tem, 0),
4915 MEM_ADDR_SPACE (tem)))
4917 rtx orig = tem;
4919 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4920 &XEXP (tem, 0), opnum,
4921 ADDR_TYPE (type), ind_levels, insn);
4922 if (!rtx_equal_p (tem, orig))
4923 push_reg_equiv_alt_mem (regno, tem);
4925 /* We can avoid a reload if the register's equivalent memory
4926 expression is valid as an indirect memory address.
4927 But not all addresses are valid in a mem used as an indirect
4928 address: only reg or reg+constant. */
4930 if (ind_levels > 0
4931 && strict_memory_address_addr_space_p (mode, tem, as)
4932 && (REG_P (XEXP (tem, 0))
4933 || (GET_CODE (XEXP (tem, 0)) == PLUS
4934 && REG_P (XEXP (XEXP (tem, 0), 0))
4935 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4937 /* TEM is not the same as what we'll be replacing the
4938 pseudo with after reload, put a USE in front of INSN
4939 in the final reload pass. */
4940 if (replace_reloads
4941 && num_not_at_initial_offset
4942 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4944 *loc = tem;
4945 /* We mark the USE with QImode so that we
4946 recognize it as one that can be safely
4947 deleted at the end of reload. */
4948 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4949 insn), QImode);
4951 /* This doesn't really count as replacing the address
4952 as a whole, since it is still a memory access. */
4954 return 0;
4956 ad = tem;
4960 /* The only remaining case where we can avoid a reload is if this is a
4961 hard register that is valid as a base register and which is not the
4962 subject of a CLOBBER in this insn. */
4964 else if (regno < FIRST_PSEUDO_REGISTER
4965 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4966 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4967 return 0;
4969 /* If we do not have one of the cases above, we must do the reload. */
4970 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4971 base_reg_class (mode, as, MEM, SCRATCH),
4972 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4973 return 1;
4976 if (strict_memory_address_addr_space_p (mode, ad, as))
4978 /* The address appears valid, so reloads are not needed.
4979 But the address may contain an eliminable register.
4980 This can happen because a machine with indirect addressing
4981 may consider a pseudo register by itself a valid address even when
4982 it has failed to get a hard reg.
4983 So do a tree-walk to find and eliminate all such regs. */
4985 /* But first quickly dispose of a common case. */
4986 if (GET_CODE (ad) == PLUS
4987 && CONST_INT_P (XEXP (ad, 1))
4988 && REG_P (XEXP (ad, 0))
4989 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4990 return 0;
4992 subst_reg_equivs_changed = 0;
4993 *loc = subst_reg_equivs (ad, insn);
4995 if (! subst_reg_equivs_changed)
4996 return 0;
4998 /* Check result for validity after substitution. */
4999 if (strict_memory_address_addr_space_p (mode, ad, as))
5000 return 0;
5003 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5006 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5008 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5009 ind_levels, win);
5011 break;
5012 win:
5013 *memrefloc = copy_rtx (*memrefloc);
5014 XEXP (*memrefloc, 0) = ad;
5015 move_replacements (&ad, &XEXP (*memrefloc, 0));
5016 return -1;
5018 while (0);
5019 #endif
5021 /* The address is not valid. We have to figure out why. First see if
5022 we have an outer AND and remove it if so. Then analyze what's inside. */
5024 if (GET_CODE (ad) == AND)
5026 removed_and = 1;
5027 loc = &XEXP (ad, 0);
5028 ad = *loc;
5031 /* One possibility for why the address is invalid is that it is itself
5032 a MEM. This can happen when the frame pointer is being eliminated, a
5033 pseudo is not allocated to a hard register, and the offset between the
5034 frame and stack pointers is not its initial value. In that case the
5035 pseudo will have been replaced by a MEM referring to the
5036 stack pointer. */
5037 if (MEM_P (ad))
5039 /* First ensure that the address in this MEM is valid. Then, unless
5040 indirect addresses are valid, reload the MEM into a register. */
5041 tem = ad;
5042 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5043 opnum, ADDR_TYPE (type),
5044 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5046 /* If tem was changed, then we must create a new memory reference to
5047 hold it and store it back into memrefloc. */
5048 if (tem != ad && memrefloc)
5050 *memrefloc = copy_rtx (*memrefloc);
5051 copy_replacements (tem, XEXP (*memrefloc, 0));
5052 loc = &XEXP (*memrefloc, 0);
5053 if (removed_and)
5054 loc = &XEXP (*loc, 0);
5057       /* Check cases similar to those for indirect addresses above, except
5058 that we can allow pseudos and a MEM since they should have been
5059 taken care of above. */
5061 if (ind_levels == 0
5062 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5063 || MEM_P (XEXP (tem, 0))
5064 || ! (REG_P (XEXP (tem, 0))
5065 || (GET_CODE (XEXP (tem, 0)) == PLUS
5066 && REG_P (XEXP (XEXP (tem, 0), 0))
5067 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5069 /* Must use TEM here, not AD, since it is the one that will
5070 have any subexpressions reloaded, if needed. */
5071 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5072 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5073 VOIDmode, 0,
5074 0, opnum, type);
5075 return ! removed_and;
5077 else
5078 return 0;
5081 /* If we have address of a stack slot but it's not valid because the
5082 displacement is too large, compute the sum in a register.
5083 Handle all base registers here, not just fp/ap/sp, because on some
5084 targets (namely SH) we can also get too large displacements from
5085 big-endian corrections. */
5086 else if (GET_CODE (ad) == PLUS
5087 && REG_P (XEXP (ad, 0))
5088 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5089 && CONST_INT_P (XEXP (ad, 1))
5090 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5091 CONST_INT)
5092 /* Similarly, if we were to reload the base register and the
5093 mem+offset address is still invalid, then we want to reload
5094 the whole address, not just the base register. */
5095 || ! maybe_memory_address_addr_space_p
5096 (mode, ad, as, &(XEXP (ad, 0)))))
5099 /* Unshare the MEM rtx so we can safely alter it. */
5100 if (memrefloc)
5102 *memrefloc = copy_rtx (*memrefloc);
5103 loc = &XEXP (*memrefloc, 0);
5104 if (removed_and)
5105 loc = &XEXP (*loc, 0);
5108 if (double_reg_address_ok
5109 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5110 PLUS, CONST_INT))
5112 /* Unshare the sum as well. */
5113 *loc = ad = copy_rtx (ad);
5115 /* Reload the displacement into an index reg.
5116 We assume the frame pointer or arg pointer is a base reg. */
5117 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5118 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5119 type, ind_levels);
5120 return 0;
5122 else
5124 /* If the sum of two regs is not necessarily valid,
5125 reload the sum into a base reg.
5126 That will at least work. */
5127 find_reloads_address_part (ad, loc,
5128 base_reg_class (mode, as, MEM, SCRATCH),
5129 GET_MODE (ad), opnum, type, ind_levels);
5131 return ! removed_and;
5134 /* If we have an indexed stack slot, there are three possible reasons why
5135 it might be invalid: The index might need to be reloaded, the address
5136 might have been made by frame pointer elimination and hence have a
5137 constant out of range, or both reasons might apply.
5139 We can easily check for an index needing reload, but even if that is the
5140 case, we might also have an invalid constant. To avoid making the
5141 conservative assumption and requiring two reloads, we see if this address
5142 is valid when not interpreted strictly. If it is, the only problem is
5143 that the index needs a reload and find_reloads_address_1 will take care
5144 of it.
5146 Handle all base registers here, not just fp/ap/sp, because on some
5147 targets (namely SPARC) we can also get invalid addresses from preventive
5148 subreg big-endian corrections made by find_reloads_toplev. We
5149 can also get expressions involving LO_SUM (rather than PLUS) from
5150 find_reloads_subreg_address.
5152 If we decide to do something, it must be that `double_reg_address_ok'
5153 is true. We generate a reload of the base register + constant and
5154 rework the sum so that the reload register will be added to the index.
5155 This is safe because we know the address isn't shared.
5157 We check for the base register as both the first and second operand of
5158 the innermost PLUS and/or LO_SUM. */
5160 for (op_index = 0; op_index < 2; ++op_index)
5162 rtx operand, addend;
5163 enum rtx_code inner_code;
5165 if (GET_CODE (ad) != PLUS)
5166 continue;
5168 inner_code = GET_CODE (XEXP (ad, 0));
5169 if (!(GET_CODE (ad) == PLUS
5170 && CONST_INT_P (XEXP (ad, 1))
5171 && (inner_code == PLUS || inner_code == LO_SUM)))
5172 continue;
5174 operand = XEXP (XEXP (ad, 0), op_index);
5175 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5176 continue;
5178 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5180 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5181 GET_CODE (addend))
5182 || operand == frame_pointer_rtx
5183 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5184 || operand == hard_frame_pointer_rtx
5185 #endif
5186 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5187 || operand == arg_pointer_rtx
5188 #endif
5189 || operand == stack_pointer_rtx)
5190 && ! maybe_memory_address_addr_space_p
5191 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5193 rtx offset_reg;
5194 enum reg_class cls;
5196 offset_reg = plus_constant (GET_MODE (ad), operand,
5197 INTVAL (XEXP (ad, 1)));
5199 /* Form the adjusted address. */
5200 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5201 ad = gen_rtx_PLUS (GET_MODE (ad),
5202 op_index == 0 ? offset_reg : addend,
5203 op_index == 0 ? addend : offset_reg);
5204 else
5205 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5206 op_index == 0 ? offset_reg : addend,
5207 op_index == 0 ? addend : offset_reg);
5208 *loc = ad;
5210 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5211 find_reloads_address_part (XEXP (ad, op_index),
5212 &XEXP (ad, op_index), cls,
5213 GET_MODE (ad), opnum, type, ind_levels);
5214 find_reloads_address_1 (mode, as,
5215 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5216 GET_CODE (XEXP (ad, op_index)),
5217 &XEXP (ad, 1 - op_index), opnum,
5218 type, 0, insn);
5220 return 0;
5224 /* See if address becomes valid when an eliminable register
5225 in a sum is replaced. */
5227 tem = ad;
5228 if (GET_CODE (ad) == PLUS)
5229 tem = subst_indexed_address (ad);
5230 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5232 /* Ok, we win that way. Replace any additional eliminable
5233 registers. */
5235 subst_reg_equivs_changed = 0;
5236 tem = subst_reg_equivs (tem, insn);
5238 /* Make sure that didn't make the address invalid again. */
5240 if (! subst_reg_equivs_changed
5241 || strict_memory_address_addr_space_p (mode, tem, as))
5243 *loc = tem;
5244 return 0;
5248 /* If constants aren't valid addresses, reload the constant address
5249 into a register. */
5250 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5252 enum machine_mode address_mode = GET_MODE (ad);
5253 if (address_mode == VOIDmode)
5254 address_mode = targetm.addr_space.address_mode (as);
5256 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5257 Unshare it so we can safely alter it. */
5258 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5259 && CONSTANT_POOL_ADDRESS_P (ad))
5261 *memrefloc = copy_rtx (*memrefloc);
5262 loc = &XEXP (*memrefloc, 0);
5263 if (removed_and)
5264 loc = &XEXP (*loc, 0);
5267 find_reloads_address_part (ad, loc,
5268 base_reg_class (mode, as, MEM, SCRATCH),
5269 address_mode, opnum, type, ind_levels);
5270 return ! removed_and;
5273 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5274 opnum, type, ind_levels, insn);
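/* Two simple cases by way of example (pseudo numbers are made up): if AD is
   (reg 200) whose equivalence is the constant (symbol_ref "x"), the constant
   is reloaded into a register of the base register class and 1 is returned;
   if AD is a hard register that is valid as a base register and is not
   clobbered by this insn, nothing is reloaded and 0 is returned.  */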
5277 /* Find all pseudo regs appearing in AD
5278 that are eliminable in favor of equivalent values
5279 and do not have hard regs; replace them by their equivalents.
5280 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5281 front of it for pseudos that we have to replace with stack slots. */
5283 static rtx
5284 subst_reg_equivs (rtx ad, rtx insn)
5286 RTX_CODE code = GET_CODE (ad);
5287 int i;
5288 const char *fmt;
5290 switch (code)
5292 case HIGH:
5293 case CONST_INT:
5294 case CONST:
5295 case CONST_DOUBLE:
5296 case CONST_FIXED:
5297 case CONST_VECTOR:
5298 case SYMBOL_REF:
5299 case LABEL_REF:
5300 case PC:
5301 case CC0:
5302 return ad;
5304 case REG:
5306 int regno = REGNO (ad);
5308 if (reg_equiv_constant (regno) != 0)
5310 subst_reg_equivs_changed = 1;
5311 return reg_equiv_constant (regno);
5313 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5315 rtx mem = make_memloc (ad, regno);
5316 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5318 subst_reg_equivs_changed = 1;
5319 /* We mark the USE with QImode so that we recognize it
5320 as one that can be safely deleted at the end of
5321 reload. */
5322 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5323 QImode);
5324 return mem;
5328 return ad;
5330 case PLUS:
5331 /* Quickly dispose of a common case. */
5332 if (XEXP (ad, 0) == frame_pointer_rtx
5333 && CONST_INT_P (XEXP (ad, 1)))
5334 return ad;
5335 break;
5337 default:
5338 break;
5341 fmt = GET_RTX_FORMAT (code);
5342 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5343 if (fmt[i] == 'e')
5344 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5345 return ad;
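/* For example (pseudo numbers invented): if (reg 200) did not get a hard
   register and is equivalent to (const_int 4), an address of the form
   (plus (reg 100) (reg 200)) becomes (plus (reg 100) (const_int 4)) and
   subst_reg_equivs_changed is set, so the caller rechecks validity.  */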
5348 /* Compute the sum of X and Y, making canonicalizations assumed in an
5349 address, namely: sum constant integers, surround the sum of two
5350 constants with a CONST, put the constant as the second operand, and
5351 group the constant on the outermost sum.
5353 This routine assumes both inputs are already in canonical form. */
5356 form_sum (enum machine_mode mode, rtx x, rtx y)
5358 rtx tem;
5360 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5361 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5363 if (CONST_INT_P (x))
5364 return plus_constant (mode, y, INTVAL (x));
5365 else if (CONST_INT_P (y))
5366 return plus_constant (mode, x, INTVAL (y));
5367 else if (CONSTANT_P (x))
5368 tem = x, x = y, y = tem;
5370 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5371 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5373 /* Note that if the operands of Y are specified in the opposite
5374 order in the recursive calls below, infinite recursion will occur. */
5375 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5376 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5378 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5379 constant will have been placed second. */
5380 if (CONSTANT_P (x) && CONSTANT_P (y))
5382 if (GET_CODE (x) == CONST)
5383 x = XEXP (x, 0);
5384 if (GET_CODE (y) == CONST)
5385 y = XEXP (y, 0);
5387 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5390 return gen_rtx_PLUS (mode, x, y);
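/* Worked examples of the canonicalization above (Pmode chosen arbitrarily):
   form_sum (Pmode, (plus (reg 5) (const_int 4)), (const_int 8)) yields
   (plus (reg 5) (const_int 12)), while summing (symbol_ref "x") with
   (const_int 4) yields (const (plus (symbol_ref "x") (const_int 4))), i.e.
   the fully constant sum is wrapped in a CONST.  */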
5393 /* If ADDR is a sum containing a pseudo register that should be
5394 replaced with a constant (from reg_equiv_constant),
5395 return the result of doing so, and also apply the associative
5396 law so that the result is more likely to be a valid address.
5397 (But it is not guaranteed to be one.)
5399 Note that at most one register is replaced, even if more are
5400 replaceable. Also, we try to put the result into a canonical form
5401 so it is more likely to be a valid address.
5403 In all other cases, return ADDR. */
5405 static rtx
5406 subst_indexed_address (rtx addr)
5408 rtx op0 = 0, op1 = 0, op2 = 0;
5409 rtx tem;
5410 int regno;
5412 if (GET_CODE (addr) == PLUS)
5414 /* Try to find a register to replace. */
5415 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5416 if (REG_P (op0)
5417 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5418 && reg_renumber[regno] < 0
5419 && reg_equiv_constant (regno) != 0)
5420 op0 = reg_equiv_constant (regno);
5421 else if (REG_P (op1)
5422 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5423 && reg_renumber[regno] < 0
5424 && reg_equiv_constant (regno) != 0)
5425 op1 = reg_equiv_constant (regno);
5426 else if (GET_CODE (op0) == PLUS
5427 && (tem = subst_indexed_address (op0)) != op0)
5428 op0 = tem;
5429 else if (GET_CODE (op1) == PLUS
5430 && (tem = subst_indexed_address (op1)) != op1)
5431 op1 = tem;
5432 else
5433 return addr;
5435 /* Pick out up to three things to add. */
5436 if (GET_CODE (op1) == PLUS)
5437 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5438 else if (GET_CODE (op0) == PLUS)
5439 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5441 /* Compute the sum. */
5442 if (op2 != 0)
5443 op1 = form_sum (GET_MODE (addr), op1, op2);
5444 if (op1 != 0)
5445 op0 = form_sum (GET_MODE (addr), op0, op1);
5447 return op0;
5449 return addr;
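/* For instance, assuming (reg 1) is a hard register and pseudo (reg 300)
   got no hard register but is equivalent to (const_int 16), the address
   (plus (plus (reg 1) (reg 300)) (const_int 4)) becomes
   (plus (reg 1) (const_int 20)): the replaced constant is folded into the
   outer displacement by form_sum.  */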
5452 /* Update the REG_INC notes for an insn. It updates all REG_INC
5453    notes for the instruction which refer to REGNO so that they refer
5454 to the reload number.
5456 INSN is the insn for which any REG_INC notes need updating.
5458 REGNO is the register number which has been reloaded.
5460 RELOADNUM is the reload number. */
5462 static void
5463 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5464 int reloadnum ATTRIBUTE_UNUSED)
5466 #ifdef AUTO_INC_DEC
5467 rtx link;
5469 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5470 if (REG_NOTE_KIND (link) == REG_INC
5471 && (int) REGNO (XEXP (link, 0)) == regno)
5472 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5473 #endif
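/* For example, if (reg 150) was auto-incremented and assigned reload 3,
   the (expr_list:REG_INC (reg 150) ...) note on INSN gets a replacement
   queued so that after subst_reloads it names reload 3's register.  */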
5476 /* Record the pseudo registers we must reload into hard registers in a
5477 subexpression of a would-be memory address, X referring to a value
5478 in mode MODE. (This function is not called if the address we find
5479 is strictly valid.)
5481 CONTEXT = 1 means we are considering regs as index regs,
5482 = 0 means we are considering them as base regs.
5483 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5484 or an autoinc code.
5485 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5486 is the code of the index part of the address. Otherwise, pass SCRATCH
5487 for this argument.
5488 OPNUM and TYPE specify the purpose of any reloads made.
5490 IND_LEVELS says how many levels of indirect addressing are
5491 supported at this point in the address.
5493 INSN, if nonzero, is the insn in which we do the reload. It is used
5494 to determine if we may generate output reloads.
5496 We return nonzero if X, as a whole, is reloaded or replaced. */
5498 /* Note that we take shortcuts assuming that no multi-reg machine mode
5499 occurs as part of an address.
5500 Also, this is not fully machine-customizable; it works for machines
5501 such as VAXen and 68000's and 32000's, but other possible machines
5502 could have addressing modes that this does not handle right.
5503 If you add push_reload calls here, you need to make sure gen_reload
5504 handles those cases gracefully. */
5506 static int
5507 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5508 rtx x, int context,
5509 enum rtx_code outer_code, enum rtx_code index_code,
5510 rtx *loc, int opnum, enum reload_type type,
5511 int ind_levels, rtx insn)
5513 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5514 ((CONTEXT) == 0 \
5515 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5516 : REGNO_OK_FOR_INDEX_P (REGNO))
5518 enum reg_class context_reg_class;
5519 RTX_CODE code = GET_CODE (x);
5521 if (context == 1)
5522 context_reg_class = INDEX_REG_CLASS;
5523 else
5524 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5526 switch (code)
5528 case PLUS:
5530 rtx orig_op0 = XEXP (x, 0);
5531 rtx orig_op1 = XEXP (x, 1);
5532 RTX_CODE code0 = GET_CODE (orig_op0);
5533 RTX_CODE code1 = GET_CODE (orig_op1);
5534 rtx op0 = orig_op0;
5535 rtx op1 = orig_op1;
5537 if (GET_CODE (op0) == SUBREG)
5539 op0 = SUBREG_REG (op0);
5540 code0 = GET_CODE (op0);
5541 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5542 op0 = gen_rtx_REG (word_mode,
5543 (REGNO (op0) +
5544 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5545 GET_MODE (SUBREG_REG (orig_op0)),
5546 SUBREG_BYTE (orig_op0),
5547 GET_MODE (orig_op0))));
5550 if (GET_CODE (op1) == SUBREG)
5552 op1 = SUBREG_REG (op1);
5553 code1 = GET_CODE (op1);
5554 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5555 /* ??? Why is this given op1's mode and above for
5556 ??? op0 SUBREGs we use word_mode? */
5557 op1 = gen_rtx_REG (GET_MODE (op1),
5558 (REGNO (op1) +
5559 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5560 GET_MODE (SUBREG_REG (orig_op1)),
5561 SUBREG_BYTE (orig_op1),
5562 GET_MODE (orig_op1))));
5564        /* A PLUS in the index register can be created only as a result of
5565           register rematerialization for an expression like &localvar*4.  Reload it.
5566 It may be possible to combine the displacement on the outer level,
5567 but it is probably not worthwhile to do so. */
5568 if (context == 1)
5570 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5571 opnum, ADDR_TYPE (type), ind_levels, insn);
5572 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5573 context_reg_class,
5574 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5575 return 1;
5578 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5579 || code0 == ZERO_EXTEND || code1 == MEM)
5581 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5582 &XEXP (x, 0), opnum, type, ind_levels,
5583 insn);
5584 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5585 &XEXP (x, 1), opnum, type, ind_levels,
5586 insn);
5589 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5590 || code1 == ZERO_EXTEND || code0 == MEM)
5592 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5593 &XEXP (x, 0), opnum, type, ind_levels,
5594 insn);
5595 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5596 &XEXP (x, 1), opnum, type, ind_levels,
5597 insn);
5600 else if (code0 == CONST_INT || code0 == CONST
5601 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5602 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5603 &XEXP (x, 1), opnum, type, ind_levels,
5604 insn);
5606 else if (code1 == CONST_INT || code1 == CONST
5607 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5608 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5609 &XEXP (x, 0), opnum, type, ind_levels,
5610 insn);
5612 else if (code0 == REG && code1 == REG)
5614 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5615 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5616 return 0;
5617 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5618 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5619 return 0;
5620 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5621 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5622 &XEXP (x, 1), opnum, type, ind_levels,
5623 insn);
5624 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5625 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5626 &XEXP (x, 0), opnum, type, ind_levels,
5627 insn);
5628 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5629 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5630 &XEXP (x, 0), opnum, type, ind_levels,
5631 insn);
5632 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5633 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5634 &XEXP (x, 1), opnum, type, ind_levels,
5635 insn);
5636 else
5638 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5642 &XEXP (x, 1), opnum, type, ind_levels,
5643 insn);
5647 else if (code0 == REG)
5649 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5650 &XEXP (x, 0), opnum, type, ind_levels,
5651 insn);
5652 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5653 &XEXP (x, 1), opnum, type, ind_levels,
5654 insn);
5657 else if (code1 == REG)
5659 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5660 &XEXP (x, 1), opnum, type, ind_levels,
5661 insn);
5662 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5663 &XEXP (x, 0), opnum, type, ind_levels,
5664 insn);
5668 return 0;
5670 case POST_MODIFY:
5671 case PRE_MODIFY:
5673 rtx op0 = XEXP (x, 0);
5674 rtx op1 = XEXP (x, 1);
5675 enum rtx_code index_code;
5676 int regno;
5677 int reloadnum;
5679 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5680 return 0;
5682 /* Currently, we only support {PRE,POST}_MODIFY constructs
5683 where a base register is {inc,dec}remented by the contents
5684 of another register or by a constant value. Thus, these
5685 operands must match. */
5686 gcc_assert (op0 == XEXP (op1, 0));
5688 /* Require index register (or constant). Let's just handle the
5689 register case in the meantime... If the target allows
5690 auto-modify by a constant then we could try replacing a pseudo
5691 register with its equivalent constant where applicable.
5693 We also handle the case where the register was eliminated
5694 resulting in a PLUS subexpression.
5696 If we later decide to reload the whole PRE_MODIFY or
5697 POST_MODIFY, inc_for_reload might clobber the reload register
5698 before reading the index. The index register might therefore
5699 need to live longer than a TYPE reload normally would, so be
5700 conservative and class it as RELOAD_OTHER. */
5701 if ((REG_P (XEXP (op1, 1))
5702 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5703 || GET_CODE (XEXP (op1, 1)) == PLUS)
5704 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5705 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5706 ind_levels, insn);
5708 gcc_assert (REG_P (XEXP (op1, 0)));
5710 regno = REGNO (XEXP (op1, 0));
5711 index_code = GET_CODE (XEXP (op1, 1));
5713 /* A register that is incremented cannot be constant! */
5714 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5715 || reg_equiv_constant (regno) == 0);
5717 /* Handle a register that is equivalent to a memory location
5718 which cannot be addressed directly. */
5719 if (reg_equiv_memory_loc (regno) != 0
5720 && (reg_equiv_address (regno) != 0
5721 || num_not_at_initial_offset))
5723 rtx tem = make_memloc (XEXP (x, 0), regno);
5725 if (reg_equiv_address (regno)
5726 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5728 rtx orig = tem;
5730 /* First reload the memory location's address.
5731 We can't use ADDR_TYPE (type) here, because we need to
5732 write back the value after reading it, hence we actually
5733 need two registers. */
5734 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5735 &XEXP (tem, 0), opnum,
5736 RELOAD_OTHER,
5737 ind_levels, insn);
5739 if (!rtx_equal_p (tem, orig))
5740 push_reg_equiv_alt_mem (regno, tem);
5742 /* Then reload the memory location into a base
5743 register. */
5744 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5745 &XEXP (op1, 0),
5746 base_reg_class (mode, as,
5747 code, index_code),
5748 GET_MODE (x), GET_MODE (x), 0,
5749 0, opnum, RELOAD_OTHER);
5751 update_auto_inc_notes (this_insn, regno, reloadnum);
5752 return 0;
5756 if (reg_renumber[regno] >= 0)
5757 regno = reg_renumber[regno];
5759 /* We require a base register here... */
5760 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5762 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5763 &XEXP (op1, 0), &XEXP (x, 0),
5764 base_reg_class (mode, as,
5765 code, index_code),
5766 GET_MODE (x), GET_MODE (x), 0, 0,
5767 opnum, RELOAD_OTHER);
5769 update_auto_inc_notes (this_insn, regno, reloadnum);
5770 return 0;
5773 return 0;
5775 case POST_INC:
5776 case POST_DEC:
5777 case PRE_INC:
5778 case PRE_DEC:
5779 if (REG_P (XEXP (x, 0)))
5781 int regno = REGNO (XEXP (x, 0));
5782 int value = 0;
5783 rtx x_orig = x;
5785 /* A register that is incremented cannot be constant! */
5786 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5787 || reg_equiv_constant (regno) == 0);
5789 /* Handle a register that is equivalent to a memory location
5790 which cannot be addressed directly. */
5791 if (reg_equiv_memory_loc (regno) != 0
5792 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5794 rtx tem = make_memloc (XEXP (x, 0), regno);
5795 if (reg_equiv_address (regno)
5796 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5798 rtx orig = tem;
5800 /* First reload the memory location's address.
5801 We can't use ADDR_TYPE (type) here, because we need to
5802 write back the value after reading it, hence we actually
5803 need two registers. */
5804 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5805 &XEXP (tem, 0), opnum, type,
5806 ind_levels, insn);
5807 if (!rtx_equal_p (tem, orig))
5808 push_reg_equiv_alt_mem (regno, tem);
5809 /* Put this inside a new increment-expression. */
5810 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5811 /* Proceed to reload that, as if it contained a register. */
5815 /* If we have a hard register that is ok in this incdec context,
5816 don't make a reload. If the register isn't nice enough for
5817           autoincdec, we can reload it.  But if an autoincrement of a
5818           register that we have just verified as acceptable is still not
5819           "valid" in the enclosing context, then no autoincrement is "valid".
5820 If that is true and something made an autoincrement anyway,
5821 this must be a special context where one is allowed.
5822 (For example, a "push" instruction.)
5823 We can't improve this address, so leave it alone. */
5825 /* Otherwise, reload the autoincrement into a suitable hard reg
5826 and record how much to increment by. */
5828 if (reg_renumber[regno] >= 0)
5829 regno = reg_renumber[regno];
5830 if (regno >= FIRST_PSEUDO_REGISTER
5831 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5832 index_code))
5834 int reloadnum;
5836            /* If we can output the register afterwards, do so; this
5837 saves the extra update.
5838 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5839 CALL_INSN - and it does not set CC0.
5840 But don't do this if we cannot directly address the
5841 memory location, since this will make it harder to
5842               reuse address reloads and increase register pressure.
5843 Also don't do this if we can probably update x directly. */
5844 rtx equiv = (MEM_P (XEXP (x, 0))
5845 ? XEXP (x, 0)
5846 : reg_equiv_mem (regno));
5847 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5848 if (insn && NONJUMP_INSN_P (insn) && equiv
5849 && memory_operand (equiv, GET_MODE (equiv))
5850 #ifdef HAVE_cc0
5851 && ! sets_cc0_p (PATTERN (insn))
5852 #endif
5853 && ! (icode != CODE_FOR_nothing
5854 && insn_operand_matches (icode, 0, equiv)
5855 && insn_operand_matches (icode, 1, equiv)))
5857 /* We use the original pseudo for loc, so that
5858 emit_reload_insns() knows which pseudo this
5859 reload refers to and updates the pseudo rtx, not
5860 its equivalent memory location, as well as the
5861 corresponding entry in reg_last_reload_reg. */
5862 loc = &XEXP (x_orig, 0);
5863 x = XEXP (x, 0);
5864 reloadnum
5865 = push_reload (x, x, loc, loc,
5866 context_reg_class,
5867 GET_MODE (x), GET_MODE (x), 0, 0,
5868 opnum, RELOAD_OTHER);
5870 else
5872 reloadnum
5873 = push_reload (x, x, loc, (rtx*) 0,
5874 context_reg_class,
5875 GET_MODE (x), GET_MODE (x), 0, 0,
5876 opnum, type);
5877 rld[reloadnum].inc
5878 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5880 value = 1;
5883 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5884 reloadnum);
5886 return value;
5888 return 0;
5890 case TRUNCATE:
5891 case SIGN_EXTEND:
5892 case ZERO_EXTEND:
5893 /* Look for parts to reload in the inner expression and reload them
5894 too, in addition to this operation. Reloading all inner parts in
5895 addition to this one shouldn't be necessary, but at this point,
5896 we don't know if we can possibly omit any part that *can* be
5897 reloaded. Targets that are better off reloading just either part
5898 (or perhaps even a different part of an outer expression), should
5899 define LEGITIMIZE_RELOAD_ADDRESS. */
5900 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5901 context, code, SCRATCH, &XEXP (x, 0), opnum,
5902 type, ind_levels, insn);
5903 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5904 context_reg_class,
5905 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5906 return 1;
5908 case MEM:
5909 /* This is probably the result of a substitution, by eliminate_regs, of
5910 an equivalent address for a pseudo that was not allocated to a hard
5911 register. Verify that the specified address is valid and reload it
5912 into a register.
5914 Since we know we are going to reload this item, don't decrement for
5915 the indirection level.
5917 Note that this is actually conservative: it would be slightly more
5918 efficient to use the value of SPILL_INDIRECT_LEVELS from
5919 reload1.c here. */
5921 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5922 opnum, ADDR_TYPE (type), ind_levels, insn);
5923 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5924 context_reg_class,
5925 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5926 return 1;
5928 case REG:
5930 int regno = REGNO (x);
5932 if (reg_equiv_constant (regno) != 0)
5934 find_reloads_address_part (reg_equiv_constant (regno), loc,
5935 context_reg_class,
5936 GET_MODE (x), opnum, type, ind_levels);
5937 return 1;
5940 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5941 that feeds this insn. */
5942 if (reg_equiv_mem (regno) != 0)
5944 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5945 context_reg_class,
5946 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5947 return 1;
5949 #endif
5951 if (reg_equiv_memory_loc (regno)
5952 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5954 rtx tem = make_memloc (x, regno);
5955 if (reg_equiv_address (regno) != 0
5956 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5958 x = tem;
5959 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5960 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5961 ind_levels, insn);
5962 if (!rtx_equal_p (x, tem))
5963 push_reg_equiv_alt_mem (regno, x);
5967 if (reg_renumber[regno] >= 0)
5968 regno = reg_renumber[regno];
5970 if (regno >= FIRST_PSEUDO_REGISTER
5971 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5972 index_code))
5974 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5975 context_reg_class,
5976 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5977 return 1;
5980 /* If a register appearing in an address is the subject of a CLOBBER
5981 in this insn, reload it into some other register to be safe.
5982 The CLOBBER is supposed to make the register unavailable
5983 from before this insn to after it. */
5984 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5986 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5987 context_reg_class,
5988 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5989 return 1;
5992 return 0;
5994 case SUBREG:
5995 if (REG_P (SUBREG_REG (x)))
5997 /* If this is a SUBREG of a hard register and the resulting register
5998 is of the wrong class, reload the whole SUBREG. This avoids
5999 needless copies if SUBREG_REG is multi-word. */
6000 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6002 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6004 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6005 index_code))
6007 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6008 context_reg_class,
6009 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6010 return 1;
6013 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6014 is larger than the class size, then reload the whole SUBREG. */
6015 else
6017 enum reg_class rclass = context_reg_class;
6018 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6019 > reg_class_size[(int) rclass])
6021 x = find_reloads_subreg_address (x, 0, opnum,
6022 ADDR_TYPE (type),
6023 ind_levels, insn, NULL);
6024 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6025 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6026 return 1;
6030 break;
6032 default:
6033 break;
6037 const char *fmt = GET_RTX_FORMAT (code);
6038 int i;
6040 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6042 if (fmt[i] == 'e')
6043 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6044 we get here. */
6045 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6046 code, SCRATCH, &XEXP (x, i),
6047 opnum, type, ind_levels, insn);
6051 #undef REG_OK_FOR_CONTEXT
6052 return 0;
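/* As a concrete case (pseudo numbers invented): for an address
   (plus (mult (reg 101) (const_int 4)) (reg 102)), the MULT operand is
   processed with CONTEXT == 1 and (reg 102) with CONTEXT == 0, so if the
   pseudos got no suitable hard registers they are reloaded into
   INDEX_REG_CLASS and the appropriate base register class respectively.  */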
6055 /* X, which is found at *LOC, is a part of an address that needs to be
6056 reloaded into a register of class RCLASS. If X is a constant, or if
6057 X is a PLUS that contains a constant, check that the constant is a
6058 legitimate operand and that we are supposed to be able to load
6059 it into the register.
6061 If not, force the constant into memory and reload the MEM instead.
6063 MODE is the mode to use, in case X is an integer constant.
6065 OPNUM and TYPE describe the purpose of any reloads made.
6067 IND_LEVELS says how many levels of indirect addressing this machine
6068 supports. */
6070 static void
6071 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6072 enum machine_mode mode, int opnum,
6073 enum reload_type type, int ind_levels)
6075 if (CONSTANT_P (x)
6076 && (!targetm.legitimate_constant_p (mode, x)
6077 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6079 x = force_const_mem (mode, x);
6080 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6081 opnum, type, ind_levels, 0);
6084 else if (GET_CODE (x) == PLUS
6085 && CONSTANT_P (XEXP (x, 1))
6086 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6087 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6088 == NO_REGS))
6090 rtx tem;
6092 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6093 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6094 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6095 opnum, type, ind_levels, 0);
6098 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6099 mode, VOIDmode, 0, 0, opnum, type);
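/* For example, on a target where a bare SYMBOL_REF is not a legitimate
   constant for MODE, X = (symbol_ref "x") is forced into the constant pool
   and the resulting MEM, rather than the symbol itself, is reloaded into a
   register of class RCLASS; the pool reference's own address is processed
   first by the recursive find_reloads_address call above.  */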
6102 /* X, a subreg of a pseudo, is a part of an address that needs to be
6103 reloaded.
6105 If the pseudo is equivalent to a memory location that cannot be directly
6106 addressed, make the necessary address reloads.
6108 If address reloads have been necessary, or if the address is changed
6109 by register elimination, return the rtx of the memory location;
6110 otherwise, return X.
6112 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6113 memory location.
6115 OPNUM and TYPE identify the purpose of the reload.
6117 IND_LEVELS says how many levels of indirect addressing are
6118 supported at this point in the address.
6120 INSN, if nonzero, is the insn in which we do the reload. It is used
6121 to determine where to put USEs for pseudos that we have to replace with
6122 stack slots. */
6124 static rtx
6125 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6126 enum reload_type type, int ind_levels, rtx insn,
6127 int *address_reloaded)
6129 int regno = REGNO (SUBREG_REG (x));
6130 int reloaded = 0;
6132 if (reg_equiv_memory_loc (regno))
6134 /* If the address is not directly addressable, or if the address is not
6135 offsettable, then it must be replaced. */
6136 if (! force_replace
6137 && (reg_equiv_address (regno)
6138 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6139 force_replace = 1;
6141 if (force_replace || num_not_at_initial_offset)
6143 rtx tem = make_memloc (SUBREG_REG (x), regno);
6145 /* If the address changes because of register elimination, then
6146 it must be replaced. */
6147 if (force_replace
6148 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6150 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6151 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6152 int offset;
6153 rtx orig = tem;
6155 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6156 hold the correct (negative) byte offset. */
6157 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6158 offset = inner_size - outer_size;
6159 else
6160 offset = SUBREG_BYTE (x);
6162 XEXP (tem, 0) = plus_constant (GET_MODE (XEXP (tem, 0)),
6163 XEXP (tem, 0), offset);
6164 PUT_MODE (tem, GET_MODE (x));
6165 if (MEM_OFFSET_KNOWN_P (tem))
6166 set_mem_offset (tem, MEM_OFFSET (tem) + offset);
6167 if (MEM_SIZE_KNOWN_P (tem)
6168 && MEM_SIZE (tem) != (HOST_WIDE_INT) outer_size)
6169 set_mem_size (tem, outer_size);
6171 /* If this was a paradoxical subreg that we replaced, the
6172 resulting memory must be sufficiently aligned to allow
6173 us to widen the mode of the memory. */
6174 if (outer_size > inner_size)
6176 rtx base;
6178 base = XEXP (tem, 0);
6179 if (GET_CODE (base) == PLUS)
6181 if (CONST_INT_P (XEXP (base, 1))
6182 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6183 return x;
6184 base = XEXP (base, 0);
6186 if (!REG_P (base)
6187 || (REGNO_POINTER_ALIGN (REGNO (base))
6188 < outer_size * BITS_PER_UNIT))
6189 return x;
6192 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6193 XEXP (tem, 0), &XEXP (tem, 0),
6194 opnum, type, ind_levels, insn);
6195 /* ??? Do we need to handle nonzero offsets somehow? */
6196 if (!offset && !rtx_equal_p (tem, orig))
6197 push_reg_equiv_alt_mem (regno, tem);
6199 /* For some processors an address may be valid in the
6200 original mode but not in a smaller mode. For
6201 example, ARM accepts a scaled index register in
6202 SImode but not in HImode. Note that this is only
6203 a problem if the address in reg_equiv_mem is already
6204 invalid in the new mode; other cases would be fixed
6205 by find_reloads_address as usual.
6207 ??? We attempt to handle such cases here by doing an
6208 additional reload of the full address after the
6209 usual processing by find_reloads_address. Note that
6210 this may not work in the general case, but it seems
6211 to cover the cases where this situation currently
6212 occurs. A more general fix might be to reload the
6213 *value* instead of the address, but this would not
6214 be expected by the callers of this routine as-is.
6216               If find_reloads_address already completely replaced
6217 the address, there is nothing further to do. */
6218 if (reloaded == 0
6219 && reg_equiv_mem (regno) != 0
6220 && !strict_memory_address_addr_space_p
6221 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6222 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6224 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6225 base_reg_class (GET_MODE (tem),
6226 MEM_ADDR_SPACE (tem),
6227 MEM, SCRATCH),
6228 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6229 opnum, type);
6230 reloaded = 1;
6232 /* If this is not a toplevel operand, find_reloads doesn't see
6233 this substitution. We have to emit a USE of the pseudo so
6234 that delete_output_reload can see it. */
6235 if (replace_reloads && recog_data.operand[opnum] != x)
6236 /* We mark the USE with QImode so that we recognize it
6237 as one that can be safely deleted at the end of
6238 reload. */
6239 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6240 SUBREG_REG (x)),
6241 insn), QImode);
6242 x = tem;
6246 if (address_reloaded)
6247 *address_reloaded = reloaded;
6249 return x;
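/* E.g. for a big-endian paradoxical (subreg:SI (reg:HI 123) 0) whose inner
   pseudo lives in a stack slot, the equivalent MEM is widened to SImode and
   its address adjusted by inner_size - outer_size (here -2), provided the
   widened memory is sufficiently aligned; if the alignment check fails the
   SUBREG is returned unchanged.  */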
6252 /* Substitute into the current INSN the registers into which we have reloaded
6253 the things that need reloading. The array `replacements'
6254 contains the locations of all pointers that must be changed
6255 and says what to replace them with.
6257    All such replacements are applied in place within INSN.  */
6259 void
6260 subst_reloads (rtx insn)
6262 int i;
6264 for (i = 0; i < n_replacements; i++)
6266 struct replacement *r = &replacements[i];
6267 rtx reloadreg = rld[r->what].reg_rtx;
6268 if (reloadreg)
6270 #ifdef DEBUG_RELOAD
6271 /* This checking takes a very long time on some platforms
6272 causing the gcc.c-torture/compile/limits-fnargs.c test
6273 to time out during testing. See PR 31850.
6275 Internal consistency test. Check that we don't modify
6276 anything in the equivalence arrays. Whenever something from
6277 those arrays needs to be reloaded, it must be unshared before
6278 being substituted into; the equivalence must not be modified.
6279 Otherwise, if the equivalence is used after that, it will
6280 have been modified, and the thing substituted (probably a
6281 register) is likely overwritten and not a usable equivalence. */
6282 int check_regno;
6284 for (check_regno = 0; check_regno < max_regno; check_regno++)
6286 #define CHECK_MODF(ARRAY) \
6287 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6288 || !loc_mentioned_in_p (r->where, \
6289 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6291 CHECK_MODF (equiv_constant);
6292 CHECK_MODF (equiv_memory_loc);
6293 CHECK_MODF (equiv_address);
6294 CHECK_MODF (equiv_mem);
6295 #undef CHECK_MODF
6297 #endif /* DEBUG_RELOAD */
6299 /* If we're replacing a LABEL_REF with a register, there must
6300 already be an indication (to e.g. flow) which label this
6301 register refers to. */
6302 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6303 || !JUMP_P (insn)
6304 || find_reg_note (insn,
6305 REG_LABEL_OPERAND,
6306 XEXP (*r->where, 0))
6307 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6309 /* Encapsulate RELOADREG so its machine mode matches what
6310 used to be there. Note that gen_lowpart_common will
6311 do the wrong thing if RELOADREG is multi-word. RELOADREG
6312 will always be a REG here. */
6313 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6314 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6316 *r->where = reloadreg;
6318 /* If reload got no reg and isn't optional, something's wrong. */
6319 else
6320 gcc_assert (rld[r->what].optional);
6324 /* Make a copy of any replacements being done into X and move those
6325 copies to locations in Y, a copy of X. */
6327 void
6328 copy_replacements (rtx x, rtx y)
6330 copy_replacements_1 (&x, &y, n_replacements);
6333 static void
6334 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6336 int i, j;
6337 rtx x, y;
6338 struct replacement *r;
6339 enum rtx_code code;
6340 const char *fmt;
6342 for (j = 0; j < orig_replacements; j++)
6343 if (replacements[j].where == px)
6345 r = &replacements[n_replacements++];
6346 r->where = py;
6347 r->what = replacements[j].what;
6348 r->mode = replacements[j].mode;
6351 x = *px;
6352 y = *py;
6353 code = GET_CODE (x);
6354 fmt = GET_RTX_FORMAT (code);
6356 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6358 if (fmt[i] == 'e')
6359 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6360 else if (fmt[i] == 'E')
6361 for (j = XVECLEN (x, i); --j >= 0; )
6362 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6363 orig_replacements);
6367 /* Change any replacements being done to *X to be done to *Y. */
6369 void
6370 move_replacements (rtx *x, rtx *y)
6372 int i;
6374 for (i = 0; i < n_replacements; i++)
6375 if (replacements[i].where == x)
6376 replacements[i].where = y;
6379 /* If LOC was scheduled to be replaced by something, return the replacement.
6380 Otherwise, return *LOC. */
6383 find_replacement (rtx *loc)
6385 struct replacement *r;
6387 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6389 rtx reloadreg = rld[r->what].reg_rtx;
6391 if (reloadreg && r->where == loc)
6393 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6394 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6396 return reloadreg;
6398 else if (reloadreg && GET_CODE (*loc) == SUBREG
6399 && r->where == &SUBREG_REG (*loc))
6401 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6402 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6404 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6405 GET_MODE (SUBREG_REG (*loc)),
6406 SUBREG_BYTE (*loc));
6410 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6411 what's inside and make a new rtl if so. */
6412 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6413 || GET_CODE (*loc) == MULT)
6415 rtx x = find_replacement (&XEXP (*loc, 0));
6416 rtx y = find_replacement (&XEXP (*loc, 1));
6418 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6419 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6422 return *loc;
6425 /* Return nonzero if register in range [REGNO, ENDREGNO)
6426 appears either explicitly or implicitly in X
6427 other than being stored into (except for earlyclobber operands).
6429 References contained within the substructure at LOC do not count.
6430 LOC may be zero, meaning don't ignore anything.
6432 This is similar to refers_to_regno_p in rtlanal.c except that we
6433 look at equivalences for pseudos that didn't get hard registers. */
6435 static int
6436 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6437 rtx x, rtx *loc)
6439 int i;
6440 unsigned int r;
6441 RTX_CODE code;
6442 const char *fmt;
6444 if (x == 0)
6445 return 0;
6447 repeat:
6448 code = GET_CODE (x);
6450 switch (code)
6452 case REG:
6453 r = REGNO (x);
6455 /* If this is a pseudo, a hard register must not have been allocated.
6456 X must therefore either be a constant or be in memory. */
6457 if (r >= FIRST_PSEUDO_REGISTER)
6459 if (reg_equiv_memory_loc (r))
6460 return refers_to_regno_for_reload_p (regno, endregno,
6461 reg_equiv_memory_loc (r),
6462 (rtx*) 0);
6464 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6465 return 0;
6468 return (endregno > r
6469 && regno < r + (r < FIRST_PSEUDO_REGISTER
6470 ? hard_regno_nregs[r][GET_MODE (x)]
6471 : 1));
6473 case SUBREG:
6474 /* If this is a SUBREG of a hard reg, we can see exactly which
6475 registers are being modified. Otherwise, handle normally. */
6476 if (REG_P (SUBREG_REG (x))
6477 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6479 unsigned int inner_regno = subreg_regno (x);
6480 unsigned int inner_endregno
6481 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6482 ? subreg_nregs (x) : 1);
6484 return endregno > inner_regno && regno < inner_endregno;
6486 break;
6488 case CLOBBER:
6489 case SET:
6490 if (&SET_DEST (x) != loc
6491 /* Note setting a SUBREG counts as referring to the REG it is in for
6492 a pseudo but not for hard registers since we can
6493 treat each word individually. */
6494 && ((GET_CODE (SET_DEST (x)) == SUBREG
6495 && loc != &SUBREG_REG (SET_DEST (x))
6496 && REG_P (SUBREG_REG (SET_DEST (x)))
6497 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6498 && refers_to_regno_for_reload_p (regno, endregno,
6499 SUBREG_REG (SET_DEST (x)),
6500 loc))
6501 /* If the output is an earlyclobber operand, this is
6502 a conflict. */
6503 || ((!REG_P (SET_DEST (x))
6504 || earlyclobber_operand_p (SET_DEST (x)))
6505 && refers_to_regno_for_reload_p (regno, endregno,
6506 SET_DEST (x), loc))))
6507 return 1;
6509 if (code == CLOBBER || loc == &SET_SRC (x))
6510 return 0;
6511 x = SET_SRC (x);
6512 goto repeat;
6514 default:
6515 break;
6518 /* X does not match, so try its subexpressions. */
6520 fmt = GET_RTX_FORMAT (code);
6521 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6523 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6525 if (i == 0)
6527 x = XEXP (x, 0);
6528 goto repeat;
6530 else
6531 if (refers_to_regno_for_reload_p (regno, endregno,
6532 XEXP (x, i), loc))
6533 return 1;
6535 else if (fmt[i] == 'E')
6537 int j;
6538 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6539 if (loc != &XVECEXP (x, i, j)
6540 && refers_to_regno_for_reload_p (regno, endregno,
6541 XVECEXP (x, i, j), loc))
6542 return 1;
6545 return 0;
6548 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6549 we check if any register number in X conflicts with the relevant register
6550 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6551 contains a MEM (we don't bother checking for memory addresses that can't
6552    conflict because we expect this to be a rare case).
6554 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6555 that we look at equivalences for pseudos that didn't get hard registers. */
6558 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6560 int regno, endregno;
6562 /* Overly conservative. */
6563 if (GET_CODE (x) == STRICT_LOW_PART
6564 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6565 x = XEXP (x, 0);
6567   /* If either argument is a constant, then modifying X cannot affect IN.  */
6568 if (CONSTANT_P (x) || CONSTANT_P (in))
6569 return 0;
6570 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6571 return refers_to_mem_for_reload_p (in);
6572 else if (GET_CODE (x) == SUBREG)
6574 regno = REGNO (SUBREG_REG (x));
6575 if (regno < FIRST_PSEUDO_REGISTER)
6576 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6577 GET_MODE (SUBREG_REG (x)),
6578 SUBREG_BYTE (x),
6579 GET_MODE (x));
6580 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6581 ? subreg_nregs (x) : 1);
6583 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6585 else if (REG_P (x))
6587 regno = REGNO (x);
6589 /* If this is a pseudo, it must not have been assigned a hard register.
6590 Therefore, it must either be in memory or be a constant. */
6592 if (regno >= FIRST_PSEUDO_REGISTER)
6594 if (reg_equiv_memory_loc (regno))
6595 return refers_to_mem_for_reload_p (in);
6596 gcc_assert (reg_equiv_constant (regno));
6597 return 0;
6600 endregno = END_HARD_REGNO (x);
6602 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6604 else if (MEM_P (x))
6605 return refers_to_mem_for_reload_p (in);
6606 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6607 || GET_CODE (x) == CC0)
6608 return reg_mentioned_p (x, in);
6609 else
6611 gcc_assert (GET_CODE (x) == PLUS);
6613 /* We actually want to know if X is mentioned somewhere inside IN.
6614 We must not say that (plus (sp) (const_int 124)) is in
6615 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6616 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6617 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6618 while (MEM_P (in))
6619 in = XEXP (in, 0);
6620 if (REG_P (in))
6621 return 0;
6622 else if (GET_CODE (in) == PLUS)
6623 return (rtx_equal_p (x, in)
6624 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6625 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6626 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6627 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6630 gcc_unreachable ();
6633 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6634 registers. */
6636 static int
6637 refers_to_mem_for_reload_p (rtx x)
6639 const char *fmt;
6640 int i;
6642 if (MEM_P (x))
6643 return 1;
6645 if (REG_P (x))
6646 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6647 && reg_equiv_memory_loc (REGNO (x)));
6649 fmt = GET_RTX_FORMAT (GET_CODE (x));
6650 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6651 if (fmt[i] == 'e'
6652 && (MEM_P (XEXP (x, i))
6653 || refers_to_mem_for_reload_p (XEXP (x, i))))
6654 return 1;
6656 return 0;
6659 /* Check the insns before INSN to see if there is a suitable register
6660 containing the same value as GOAL.
6661 If OTHER is -1, look for a register in class RCLASS.
6662 Otherwise, just see if register number OTHER shares GOAL's value.
6664 Return an rtx for the register found, or zero if none is found.
6666 If RELOAD_REG_P is (short *)1,
6667 we reject any hard reg that appears in reload_reg_rtx
6668 because such a hard reg is also needed coming into this insn.
6670 If RELOAD_REG_P is any other nonzero value,
6671 it is a vector indexed by hard reg number
6672 and we reject any hard reg whose element in the vector is nonnegative
6673 as well as any that appears in reload_reg_rtx.
6675 If GOAL is zero, then GOALREG is a register number; we look
6676 for an equivalent for that register.
6678 MODE is the machine mode of the value we want an equivalence for.
6679 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6681 This function is used by jump.c as well as in the reload pass.
6683 If GOAL is the sum of the stack pointer and a constant, we treat it
6684 as if it were a constant except that sp is required to be unchanging. */
6687 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6688 short *reload_reg_p, int goalreg, enum machine_mode mode)
6690 rtx p = insn;
6691 rtx goaltry, valtry, value, where;
6692 rtx pat;
6693 int regno = -1;
6694 int valueno;
6695 int goal_mem = 0;
6696 int goal_const = 0;
6697 int goal_mem_addr_varies = 0;
6698 int need_stable_sp = 0;
6699 int nregs;
6700 int valuenregs;
6701 int num = 0;
6703 if (goal == 0)
6704 regno = goalreg;
6705 else if (REG_P (goal))
6706 regno = REGNO (goal);
6707 else if (MEM_P (goal))
6709 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6710 if (MEM_VOLATILE_P (goal))
6711 return 0;
6712 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6713 return 0;
6714 /* An address with side effects must be reexecuted. */
6715 switch (code)
6717 case POST_INC:
6718 case PRE_INC:
6719 case POST_DEC:
6720 case PRE_DEC:
6721 case POST_MODIFY:
6722 case PRE_MODIFY:
6723 return 0;
6724 default:
6725 break;
6727 goal_mem = 1;
6729 else if (CONSTANT_P (goal))
6730 goal_const = 1;
6731 else if (GET_CODE (goal) == PLUS
6732 && XEXP (goal, 0) == stack_pointer_rtx
6733 && CONSTANT_P (XEXP (goal, 1)))
6734 goal_const = need_stable_sp = 1;
6735 else if (GET_CODE (goal) == PLUS
6736 && XEXP (goal, 0) == frame_pointer_rtx
6737 && CONSTANT_P (XEXP (goal, 1)))
6738 goal_const = 1;
6739 else
6740 return 0;
6742 num = 0;
6743 /* Scan insns back from INSN, looking for one that copies
6744 a value into or out of GOAL.
6745 Stop and give up if we reach a label. */
6747 while (1)
6749 p = PREV_INSN (p);
6750 if (p && DEBUG_INSN_P (p))
6751 continue;
6752 num++;
6753 if (p == 0 || LABEL_P (p)
6754 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6755 return 0;
6757 /* Don't reuse register contents from before a setjmp-type
6758 function call; on the second return (from the longjmp) it
6759 might have been clobbered by a later reuse. It doesn't
6760                     seem worthwhile to check whether it really is reused,
6761                     even if that information were readily available;
6762 just don't reuse it across the setjmp call. */
6763 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6764 return 0;
6766 if (NONJUMP_INSN_P (p)
6767 /* If we don't want spill regs ... */
6768 && (! (reload_reg_p != 0
6769 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6770 /* ... then ignore insns introduced by reload; they aren't
6771 useful and can cause results in reload_as_needed to be
6772 different from what they were when calculating the need for
6773 spills. If we notice an input-reload insn here, we will
6774 reject it below, but it might hide a usable equivalent.
6775 That makes bad code. It may even fail: perhaps no reg was
6776 spilled for this insn because it was assumed we would find
6777 that equivalent. */
6778 || INSN_UID (p) < reload_first_uid))
6780 rtx tem;
6781 pat = single_set (p);
6783 /* First check for something that sets some reg equal to GOAL. */
6784 if (pat != 0
6785 && ((regno >= 0
6786 && true_regnum (SET_SRC (pat)) == regno
6787 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6788               ||
6789               (regno >= 0
6790 && true_regnum (SET_DEST (pat)) == regno
6791 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6792               ||
6793              (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6794 /* When looking for stack pointer + const,
6795 make sure we don't use a stack adjust. */
6796 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6797 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6798 || (goal_mem
6799 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6800 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6801 || (goal_mem
6802 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6803 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6804 /* If we are looking for a constant,
6805 and something equivalent to that constant was copied
6806 into a reg, we can use that reg. */
6807 || (goal_const && REG_NOTES (p) != 0
6808 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6809 && ((rtx_equal_p (XEXP (tem, 0), goal)
6810 && (valueno
6811 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6812 || (REG_P (SET_DEST (pat))
6813 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6814 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6815 && CONST_INT_P (goal)
6816 && 0 != (goaltry
6817 = operand_subword (XEXP (tem, 0), 0, 0,
6818 VOIDmode))
6819 && rtx_equal_p (goal, goaltry)
6820 && (valtry
6821 = operand_subword (SET_DEST (pat), 0, 0,
6822 VOIDmode))
6823 && (valueno = true_regnum (valtry)) >= 0)))
6824 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6825 NULL_RTX))
6826 && REG_P (SET_DEST (pat))
6827 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6828 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6829 && CONST_INT_P (goal)
6830 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6831 VOIDmode))
6832 && rtx_equal_p (goal, goaltry)
6833 && (valtry
6834 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6835 && (valueno = true_regnum (valtry)) >= 0)))
6837 if (other >= 0)
6839 if (valueno != other)
6840 continue;
6842 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6843 continue;
6844 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6845 mode, valueno))
6846 continue;
6847 value = valtry;
6848 where = p;
6849 break;
6854 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6855 (or copying VALUE into GOAL, if GOAL is also a register).
6856 Now verify that VALUE is really valid. */
6858 /* VALUENO is the register number of VALUE; a hard register. */
6860 /* Don't try to re-use something that is killed in this insn. We want
6861 to be able to trust REG_UNUSED notes. */
6862 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6863 return 0;
6865 /* If we propose to get the value from the stack pointer or if GOAL is
6866 a MEM based on the stack pointer, we need a stable SP. */
6867 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6868 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6869 goal)))
6870 need_stable_sp = 1;
6872 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6873 if (GET_MODE (value) != mode)
6874 return 0;
6876 /* Reject VALUE if it was loaded from GOAL
6877 and is also a register that appears in the address of GOAL. */
6879 if (goal_mem && value == SET_DEST (single_set (where))
6880 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6881 goal, (rtx*) 0))
6882 return 0;
6884 /* Reject registers that overlap GOAL. */
6886 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6887 nregs = hard_regno_nregs[regno][mode];
6888 else
6889 nregs = 1;
6890 valuenregs = hard_regno_nregs[valueno][mode];
6892 if (!goal_mem && !goal_const
6893 && regno + nregs > valueno && regno < valueno + valuenregs)
6894 return 0;
6896 /* Reject VALUE if it is one of the regs reserved for reloads.
6897 Reload1 knows how to reuse them anyway, and it would get
6898 confused if we allocated one without its knowledge.
6899 (Now that insns introduced by reload are ignored above,
6900 this case shouldn't happen, but I'm not positive.) */
6902 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6904 int i;
6905 for (i = 0; i < valuenregs; ++i)
6906 if (reload_reg_p[valueno + i] >= 0)
6907 return 0;
6910 /* Reject VALUE if it is a register being used for an input reload
6911 even if it is not one of those reserved. */
6913 if (reload_reg_p != 0)
6915 int i;
6916 for (i = 0; i < n_reloads; i++)
6917 if (rld[i].reg_rtx != 0 && rld[i].in)
6919 int regno1 = REGNO (rld[i].reg_rtx);
6920 int nregs1 = hard_regno_nregs[regno1]
6921 [GET_MODE (rld[i].reg_rtx)];
6922 if (regno1 < valueno + valuenregs
6923 && regno1 + nregs1 > valueno)
6924 return 0;
6928 if (goal_mem)
6929 /* We must treat frame pointer as varying here,
6930 since it can vary--in a nonlocal goto as generated by expand_goto. */
6931 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6933 /* Now verify that the values of GOAL and VALUE remain unaltered
6934 until INSN is reached. */
6936 p = insn;
6937 while (1)
6939 p = PREV_INSN (p);
6940 if (p == where)
6941 return value;
6943 /* Don't trust the conversion past a function call
6944 if either of the two is in a call-clobbered register, or memory. */
6945 if (CALL_P (p))
6947 int i;
6949 if (goal_mem || need_stable_sp)
6950 return 0;
6952 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6953 for (i = 0; i < nregs; ++i)
6954 if (call_used_regs[regno + i]
6955 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6956 return 0;
6958 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6959 for (i = 0; i < valuenregs; ++i)
6960 if (call_used_regs[valueno + i]
6961 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6962 return 0;
6965 if (INSN_P (p))
6967 pat = PATTERN (p);
6969 /* Watch out for unspec_volatile, and volatile asms. */
6970 if (volatile_insn_p (pat))
6971 return 0;
6973 /* If this insn P stores in either GOAL or VALUE, return 0.
6974 If GOAL is a memory ref and this insn writes memory, return 0.
6975 If GOAL is a memory ref and its address is not constant,
6976 and this insn P changes a register used in GOAL, return 0. */
6978 if (GET_CODE (pat) == COND_EXEC)
6979 pat = COND_EXEC_CODE (pat);
6980 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6982 rtx dest = SET_DEST (pat);
6983 while (GET_CODE (dest) == SUBREG
6984 || GET_CODE (dest) == ZERO_EXTRACT
6985 || GET_CODE (dest) == STRICT_LOW_PART)
6986 dest = XEXP (dest, 0);
6987 if (REG_P (dest))
6989 int xregno = REGNO (dest);
6990 int xnregs;
6991 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6992 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6993 else
6994 xnregs = 1;
6995 if (xregno < regno + nregs && xregno + xnregs > regno)
6996 return 0;
6997 if (xregno < valueno + valuenregs
6998 && xregno + xnregs > valueno)
6999 return 0;
7000 if (goal_mem_addr_varies
7001 && reg_overlap_mentioned_for_reload_p (dest, goal))
7002 return 0;
7003 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7004 return 0;
7006 else if (goal_mem && MEM_P (dest)
7007 && ! push_operand (dest, GET_MODE (dest)))
7008 return 0;
7009 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7010 && reg_equiv_memory_loc (regno) != 0)
7011 return 0;
7012 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7013 return 0;
7015 else if (GET_CODE (pat) == PARALLEL)
7017 int i;
7018 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7020 rtx v1 = XVECEXP (pat, 0, i);
7021 if (GET_CODE (v1) == COND_EXEC)
7022 v1 = COND_EXEC_CODE (v1);
7023 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7025 rtx dest = SET_DEST (v1);
7026 while (GET_CODE (dest) == SUBREG
7027 || GET_CODE (dest) == ZERO_EXTRACT
7028 || GET_CODE (dest) == STRICT_LOW_PART)
7029 dest = XEXP (dest, 0);
7030 if (REG_P (dest))
7032 int xregno = REGNO (dest);
7033 int xnregs;
7034 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7035 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7036 else
7037 xnregs = 1;
7038 if (xregno < regno + nregs
7039 && xregno + xnregs > regno)
7040 return 0;
7041 if (xregno < valueno + valuenregs
7042 && xregno + xnregs > valueno)
7043 return 0;
7044 if (goal_mem_addr_varies
7045 && reg_overlap_mentioned_for_reload_p (dest,
7046 goal))
7047 return 0;
7048 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7049 return 0;
7051 else if (goal_mem && MEM_P (dest)
7052 && ! push_operand (dest, GET_MODE (dest)))
7053 return 0;
7054 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7055 && reg_equiv_memory_loc (regno) != 0)
7056 return 0;
7057 else if (need_stable_sp
7058 && push_operand (dest, GET_MODE (dest)))
7059 return 0;
7064 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7066 rtx link;
7068 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7069 link = XEXP (link, 1))
7071 pat = XEXP (link, 0);
7072 if (GET_CODE (pat) == CLOBBER)
7074 rtx dest = SET_DEST (pat);
7076 if (REG_P (dest))
7078 int xregno = REGNO (dest);
7079 int xnregs
7080 = hard_regno_nregs[xregno][GET_MODE (dest)];
7082 if (xregno < regno + nregs
7083 && xregno + xnregs > regno)
7084 return 0;
7085 else if (xregno < valueno + valuenregs
7086 && xregno + xnregs > valueno)
7087 return 0;
7088 else if (goal_mem_addr_varies
7089 && reg_overlap_mentioned_for_reload_p (dest,
7090 goal))
7091 return 0;
7094 else if (goal_mem && MEM_P (dest)
7095 && ! push_operand (dest, GET_MODE (dest)))
7096 return 0;
7097 else if (need_stable_sp
7098 && push_operand (dest, GET_MODE (dest)))
7099 return 0;
7104 #ifdef AUTO_INC_DEC
7105 /* If this insn auto-increments or auto-decrements
7106 either regno or valueno, return 0 now.
7107 If GOAL is a memory ref and its address is not constant,
7108 and this insn P increments a register used in GOAL, return 0. */
7110 rtx link;
7112 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7113 if (REG_NOTE_KIND (link) == REG_INC
7114 && REG_P (XEXP (link, 0)))
7116 int incno = REGNO (XEXP (link, 0));
7117 if (incno < regno + nregs && incno >= regno)
7118 return 0;
7119 if (incno < valueno + valuenregs && incno >= valueno)
7120 return 0;
7121 if (goal_mem_addr_varies
7122 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7123 goal))
7124 return 0;
7127 #endif
7132 /* Find a place where INCED appears in an increment or decrement operator
7133 within X, and return the amount INCED is incremented or decremented by.
7134 The value is always positive. */
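/* Worked example, added as an illustration and not part of the original
   source: with X = (set (reg:SI 1) (mem:SI (post_inc:SI (reg:SI 2)))) and
   INCED = (reg:SI 2), the recursive walk reaches the MEM and the result is
   GET_MODE_SIZE (SImode), i.e. 4 on a typical 32-bit target.  For a MEM
   whose address is (pre_modify (reg 2) (plus (reg 2) (const_int -8))) the
   result is 8, since the amount is always returned as a positive number.  */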
7136 static int
7137 find_inc_amount (rtx x, rtx inced)
7139 enum rtx_code code = GET_CODE (x);
7140 const char *fmt;
7141 int i;
7143 if (code == MEM)
7145 rtx addr = XEXP (x, 0);
7146 if ((GET_CODE (addr) == PRE_DEC
7147 || GET_CODE (addr) == POST_DEC
7148 || GET_CODE (addr) == PRE_INC
7149 || GET_CODE (addr) == POST_INC)
7150 && XEXP (addr, 0) == inced)
7151 return GET_MODE_SIZE (GET_MODE (x));
7152 else if ((GET_CODE (addr) == PRE_MODIFY
7153 || GET_CODE (addr) == POST_MODIFY)
7154 && GET_CODE (XEXP (addr, 1)) == PLUS
7155 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7156 && XEXP (addr, 0) == inced
7157 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7159 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7160 return i < 0 ? -i : i;
7164 fmt = GET_RTX_FORMAT (code);
7165 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7167 if (fmt[i] == 'e')
7169 int tem = find_inc_amount (XEXP (x, i), inced);
7170 if (tem != 0)
7171 return tem;
7173 if (fmt[i] == 'E')
7175 int j;
7176 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7178 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7179 if (tem != 0)
7180 return tem;
7185 return 0;
7188 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7189 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7191 #ifdef AUTO_INC_DEC
7192 static int
7193 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7194 rtx insn)
7196 rtx link;
7198 gcc_assert (insn);
7200 if (! INSN_P (insn))
7201 return 0;
7203 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7204 if (REG_NOTE_KIND (link) == REG_INC)
7206 unsigned int test = (int) REGNO (XEXP (link, 0));
7207 if (test >= regno && test < endregno)
7208 return 1;
7210 return 0;
7212 #else
7214 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7216 #endif
7218 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7219 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7220 REG_INC. REGNO must refer to a hard register. */
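/* Examples, added for illustration and not part of the original source:
   for an insn whose PATTERN is (clobber (reg:SI 3)),
   regno_clobbered_p (3, insn, SImode, 0) returns 1.  If the pattern is
   instead (set (reg:SI 3) ...), the call returns 1 only when SETS is 1,
   and with SETS == 2 a REG_INC note naming any register in the range
   REGNO .. REGNO + hard_regno_nregs[REGNO][MODE] - 1 also counts.  */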
7222 int
7223 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7224 int sets)
7226 unsigned int nregs, endregno;
7228 /* regno must be a hard register. */
7229 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7231 nregs = hard_regno_nregs[regno][mode];
7232 endregno = regno + nregs;
7234 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7235 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7236 && REG_P (XEXP (PATTERN (insn), 0)))
7238 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7240 return test >= regno && test < endregno;
7243 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7244 return 1;
7246 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7248 int i = XVECLEN (PATTERN (insn), 0) - 1;
7250 for (; i >= 0; i--)
7252 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7253 if ((GET_CODE (elt) == CLOBBER
7254 || (sets == 1 && GET_CODE (elt) == SET))
7255 && REG_P (XEXP (elt, 0)))
7257 unsigned int test = REGNO (XEXP (elt, 0));
7259 if (test >= regno && test < endregno)
7260 return 1;
7262 if (sets == 2
7263 && reg_inc_found_and_valid_p (regno, endregno, elt))
7264 return 1;
7268 return 0;
7271 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
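/* Example, added for illustration and not part of the original source:
   assuming DImode occupies two hard registers, if RELOADREG is (reg:DI 10)
   and MODE is SImode, the result is (reg:SI 10) on a
   !REG_WORDS_BIG_ENDIAN target and (reg:SI 11) when REG_WORDS_BIG_ENDIAN
   holds, so that the register holding the low part is returned.  */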
7272 rtx
7273 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7275 int regno;
7277 if (GET_MODE (reloadreg) == mode)
7278 return reloadreg;
7280 regno = REGNO (reloadreg);
7282 if (REG_WORDS_BIG_ENDIAN)
7283 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7284 - (int) hard_regno_nregs[regno][mode];
7286 return gen_rtx_REG (mode, regno);
7289 static const char *const reload_when_needed_name[] =
7291 "RELOAD_FOR_INPUT",
7292 "RELOAD_FOR_OUTPUT",
7293 "RELOAD_FOR_INSN",
7294 "RELOAD_FOR_INPUT_ADDRESS",
7295 "RELOAD_FOR_INPADDR_ADDRESS",
7296 "RELOAD_FOR_OUTPUT_ADDRESS",
7297 "RELOAD_FOR_OUTADDR_ADDRESS",
7298 "RELOAD_FOR_OPERAND_ADDRESS",
7299 "RELOAD_FOR_OPADDR_ADDR",
7300 "RELOAD_OTHER",
7301 "RELOAD_FOR_OTHER_ADDRESS"
7304 /* These functions are used to print the variables set by 'find_reloads' */
7306 DEBUG_FUNCTION void
7307 debug_reload_to_stream (FILE *f)
7309 int r;
7310 const char *prefix;
7312 if (! f)
7313 f = stderr;
7314 for (r = 0; r < n_reloads; r++)
7316 fprintf (f, "Reload %d: ", r);
7318 if (rld[r].in != 0)
7320 fprintf (f, "reload_in (%s) = ",
7321 GET_MODE_NAME (rld[r].inmode));
7322 print_inline_rtx (f, rld[r].in, 24);
7323 fprintf (f, "\n\t");
7326 if (rld[r].out != 0)
7328 fprintf (f, "reload_out (%s) = ",
7329 GET_MODE_NAME (rld[r].outmode));
7330 print_inline_rtx (f, rld[r].out, 24);
7331 fprintf (f, "\n\t");
7334 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7336 fprintf (f, "%s (opnum = %d)",
7337 reload_when_needed_name[(int) rld[r].when_needed],
7338 rld[r].opnum);
7340 if (rld[r].optional)
7341 fprintf (f, ", optional");
7343 if (rld[r].nongroup)
7344 fprintf (f, ", nongroup");
7346 if (rld[r].inc != 0)
7347 fprintf (f, ", inc by %d", rld[r].inc);
7349 if (rld[r].nocombine)
7350 fprintf (f, ", can't combine");
7352 if (rld[r].secondary_p)
7353 fprintf (f, ", secondary_reload_p");
7355 if (rld[r].in_reg != 0)
7357 fprintf (f, "\n\treload_in_reg: ");
7358 print_inline_rtx (f, rld[r].in_reg, 24);
7361 if (rld[r].out_reg != 0)
7363 fprintf (f, "\n\treload_out_reg: ");
7364 print_inline_rtx (f, rld[r].out_reg, 24);
7367 if (rld[r].reg_rtx != 0)
7369 fprintf (f, "\n\treload_reg_rtx: ");
7370 print_inline_rtx (f, rld[r].reg_rtx, 24);
7373 prefix = "\n\t";
7374 if (rld[r].secondary_in_reload != -1)
7376 fprintf (f, "%ssecondary_in_reload = %d",
7377 prefix, rld[r].secondary_in_reload);
7378 prefix = ", ";
7381 if (rld[r].secondary_out_reload != -1)
7382 fprintf (f, "%ssecondary_out_reload = %d\n",
7383 prefix, rld[r].secondary_out_reload);
7385 prefix = "\n\t";
7386 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7388 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7389 insn_data[rld[r].secondary_in_icode].name);
7390 prefix = ", ";
7393 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7394 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7395 insn_data[rld[r].secondary_out_icode].name);
7397 fprintf (f, "\n");
7401 DEBUG_FUNCTION void
7402 debug_reload (void)
7404 debug_reload_to_stream (stderr);
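/* Usage sketch, added for illustration and not part of the original source:
   both entry points are intended to be called by hand from a debugger while
   the rld[] array set up by find_reloads is still live, e.g.

	(gdb) call debug_reload ()
	(gdb) call debug_reload_to_stream (stderr)

   Each reload is listed with its in/out rtx, register class, when_needed
   kind and operand number in the format built above.  */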