gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
58 NOTE SIDE EFFECTS:
60 find_reloads can alter the operands of the instruction it is called on.
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
74 Using a reload register for several reloads in one insn:
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
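/* In outline, the protocol described above is (sketch only; the real
   callers live in reload1.c):

      init_reload ();                          -- once, early on
      for each insn that needs reloading:
        find_reloads (insn, replace, ind_levels, ...);
        ... choose a hard reg for each entry of rld[], setting its reload
            register (see `reload_reg_rtx' above), and emit load insns
            before / store insns after the insn ...
        subst_reloads (insn);                  -- if locations were recorded  */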
88 #define REG_OK_STRICT
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "function.h"
110 #include "params.h"
111 #include "target.h"
112 #include "ira.h"
114 /* True if X is a constant that can be forced into the constant pool.
115 MODE is the mode of the operand, or VOIDmode if not known. */
116 #define CONST_POOL_OK_P(MODE, X) \
117 ((MODE) != VOIDmode \
118 && CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (MODE, X))
122 /* True if C is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
125 static inline bool
126 small_register_class_p (reg_class_t rclass)
128 return (reg_class_size [(int) rclass] == 1
129 || (reg_class_size [(int) rclass] >= 1
130 && targetm.class_likely_spilled_p (rclass)));
134 /* All reloads of the current insn are recorded here. See reload.h for
135 comments. */
136 int n_reloads;
137 struct reload rld[MAX_RELOADS];
139 /* All the "earlyclobber" operands of the current insn
140 are recorded here. */
141 int n_earlyclobbers;
142 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
144 int reload_n_operands;
146 /* Replacing reloads.
148 If `replace_reloads' is nonzero, then as each reload is recorded
149 an entry is made for it in the table `replacements'.
150 Then later `subst_reloads' can look through that table and
151 perform all the replacements needed. */
153 /* Nonzero means record the places to replace. */
154 static int replace_reloads;
156 /* Each replacement is recorded with a structure like this. */
157 struct replacement
159 rtx *where; /* Location to store in */
160 int what; /* which reload this is for */
161 enum machine_mode mode; /* mode it must have */
164 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
166 /* Number of replacements currently recorded. */
167 static int n_replacements;
169 /* Used to track what is modified by an operand. */
170 struct decomposition
172 int reg_flag; /* Nonzero if referencing a register. */
173 int safe; /* Nonzero if this can't conflict with anything. */
174 rtx base; /* Base address for MEM. */
175 HOST_WIDE_INT start; /* Starting offset or register number. */
176 HOST_WIDE_INT end; /* Ending offset or register number. */
179 #ifdef SECONDARY_MEMORY_NEEDED
181 /* Save MEMs needed to copy from one class of registers to another. One MEM
182 is used per mode, but normally only one or two modes are ever used.
184 We keep two versions, before and after register elimination. The one
185 after register elimination is recorded separately for each operand. This
186 is done in case the address is not valid, to be sure that we reload
187 each one separately. */
189 static rtx secondary_memlocs[NUM_MACHINE_MODES];
190 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
191 static int secondary_memlocs_elim_used = 0;
192 #endif
194 /* The instruction we are doing reloads for;
195 so we can test whether a register dies in it. */
196 static rtx this_insn;
198 /* Nonzero if this instruction is a user-specified asm with operands. */
199 static int this_insn_is_asm;
201 /* If hard_regs_live_known is nonzero,
202 we can tell which hard regs are currently live,
203 at least enough to succeed in choosing dummy reloads. */
204 static int hard_regs_live_known;
206 /* Indexed by hard reg number,
207 element is nonnegative if hard reg has been spilled.
208 This vector is passed to `find_reloads' as an argument
209 and is not changed here. */
210 static short *static_reload_reg_p;
212 /* Set to 1 in subst_reg_equivs if it changes anything. */
213 static int subst_reg_equivs_changed;
215 /* On return from push_reload, holds the reload-number for the OUT
216 operand, which can be different from the reload-number for the IN operand. */
217 static int output_reloadnum;
219 /* Compare two RTX's. */
220 #define MATCHES(x, y) \
221 (x == y || (x != 0 && (REG_P (x) \
222 ? REG_P (y) && REGNO (x) == REGNO (y) \
223 : rtx_equal_p (x, y) && ! side_effects_p (x))))
225 /* Indicates whether two reload purposes are for similar enough things that
226 we can merge their reloads. */
227 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
228 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
229 || ((when1) == (when2) && (op1) == (op2)) \
230 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
231 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
232 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
233 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
234 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
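/* For example: a RELOAD_OTHER reload merges with anything; two
   RELOAD_FOR_INPUT reloads merge even for different operands; but a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 0 does not merge with one
   for operand 1, since equal types only merge when the operand numbers
   also match.  (Illustration of the macro above.)  */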
236 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
237 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
238 ((when1) != (when2) \
239 || ! ((op1) == (op2) \
240 || (when1) == RELOAD_FOR_INPUT \
241 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
242 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
244 /* If we are going to reload an address, compute the reload type to
245 use. */
246 #define ADDR_TYPE(type) \
247 ((type) == RELOAD_FOR_INPUT_ADDRESS \
248 ? RELOAD_FOR_INPADDR_ADDRESS \
249 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
250 ? RELOAD_FOR_OUTADDR_ADDRESS \
251 : (type)))
253 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
254 enum machine_mode, enum reload_type,
255 enum insn_code *, secondary_reload_info *);
256 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
257 int, unsigned int);
258 static void push_replacement (rtx *, int, enum machine_mode);
259 static void dup_replacements (rtx *, rtx *);
260 static void combine_reloads (void);
261 static int find_reusable_reload (rtx *, rtx, enum reg_class,
262 enum reload_type, int, int);
263 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
264 enum machine_mode, reg_class_t, int, int);
265 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
266 static struct decomposition decompose (rtx);
267 static int immune_p (rtx, rtx, struct decomposition);
268 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
269 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
270 int *);
271 static rtx make_memloc (rtx, int);
272 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
273 addr_space_t, rtx *);
274 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
275 int, enum reload_type, int, rtx);
276 static rtx subst_reg_equivs (rtx, rtx);
277 static rtx subst_indexed_address (rtx);
278 static void update_auto_inc_notes (rtx, int, int);
279 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
280 enum rtx_code, enum rtx_code, rtx *,
281 int, enum reload_type,int, rtx);
282 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
283 enum machine_mode, int,
284 enum reload_type, int);
285 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
286 int, rtx, int *);
287 static void copy_replacements_1 (rtx *, rtx *, int);
288 static int find_inc_amount (rtx, rtx);
289 static int refers_to_mem_for_reload_p (rtx);
290 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
291 rtx, rtx *);
293 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
294 list yet. */
296 static void
297 push_reg_equiv_alt_mem (int regno, rtx mem)
299 rtx it;
301 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
302 if (rtx_equal_p (XEXP (it, 0), mem))
303 return;
305 reg_equiv_alt_mem_list (regno)
306 = alloc_EXPR_LIST (REG_EQUIV, mem,
307 reg_equiv_alt_mem_list (regno));
310 /* Determine if any secondary reloads are needed for loading (if IN_P is
311 nonzero) or storing (if IN_P is zero) X to or from a reload register of
312 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
313 are needed, push them.
315 Return the reload number of the secondary reload we made, or -1 if
316 we didn't need one. *PICODE is set to the insn_code to use if we do
317 need a secondary reload. */
319 static int
320 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
321 enum reg_class reload_class,
322 enum machine_mode reload_mode, enum reload_type type,
323 enum insn_code *picode, secondary_reload_info *prev_sri)
325 enum reg_class rclass = NO_REGS;
326 enum reg_class scratch_class;
327 enum machine_mode mode = reload_mode;
328 enum insn_code icode = CODE_FOR_nothing;
329 enum insn_code t_icode = CODE_FOR_nothing;
330 enum reload_type secondary_type;
331 int s_reload, t_reload = -1;
332 const char *scratch_constraint;
333 char letter;
334 secondary_reload_info sri;
336 if (type == RELOAD_FOR_INPUT_ADDRESS
337 || type == RELOAD_FOR_OUTPUT_ADDRESS
338 || type == RELOAD_FOR_INPADDR_ADDRESS
339 || type == RELOAD_FOR_OUTADDR_ADDRESS)
340 secondary_type = type;
341 else
342 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
344 *picode = CODE_FOR_nothing;
346 /* If X is a paradoxical SUBREG, use the inner value to determine both the
347 mode and object being reloaded. */
348 if (paradoxical_subreg_p (x))
350 x = SUBREG_REG (x);
351 reload_mode = GET_MODE (x);
354 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
355 is still a pseudo-register by now, it *must* have an equivalent MEM
356 but we don't want to assume that), use that equivalent when seeing if
357 a secondary reload is needed since whether or not a reload is needed
358 might be sensitive to the form of the MEM. */
360 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
361 && reg_equiv_mem (REGNO (x)))
362 x = reg_equiv_mem (REGNO (x));
364 sri.icode = CODE_FOR_nothing;
365 sri.prev_sri = prev_sri;
366 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
367 reload_mode, &sri);
368 icode = (enum insn_code) sri.icode;
370 /* If we don't need any secondary registers, done. */
371 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
372 return -1;
374 if (rclass != NO_REGS)
375 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
376 reload_mode, type, &t_icode, &sri);
378 /* If we will be using an insn, the secondary reload is for a
379 scratch register. */
381 if (icode != CODE_FOR_nothing)
383 /* If IN_P is nonzero, the reload register will be the output in
384 operand 0. If IN_P is zero, the reload register will be the input
385 in operand 1. Outputs should have an initial "=", which we must
386 skip. */
388 /* ??? It would be useful to be able to handle only two, or more than
389 three, operands, but for now we can only handle the case of having
390 exactly three: output, input and one temp/scratch. */
391 gcc_assert (insn_data[(int) icode].n_operands == 3);
393 /* ??? We currently have no way to represent a reload that needs
394 an icode to reload from an intermediate tertiary reload register.
395 We should probably have a new field in struct reload to tag a
396 chain of scratch operand reloads onto. */
397 gcc_assert (rclass == NO_REGS);
399 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
400 gcc_assert (*scratch_constraint == '=');
401 scratch_constraint++;
402 if (*scratch_constraint == '&')
403 scratch_constraint++;
404 letter = *scratch_constraint;
405 scratch_class = (letter == 'r' ? GENERAL_REGS
406 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
407 scratch_constraint));
409 rclass = scratch_class;
410 mode = insn_data[(int) icode].operand[2].mode;
413 /* This case isn't valid, so fail. Reload is allowed to use the same
414 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
415 in the case of a secondary register, we actually need two different
416 registers for correct code. We fail here to prevent the possibility of
417 silently generating incorrect code later.
419 The convention is that secondary input reloads are valid only if the
420 secondary_class is different from class. If you have such a case, you
421 cannot use secondary reloads; you must work around the problem some
422 other way.
424 Allow this when a reload_in/out pattern is being used. I.e. assume
425 that the generated code handles this case. */
427 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
428 || t_icode != CODE_FOR_nothing);
430 /* See if we can reuse an existing secondary reload. */
431 for (s_reload = 0; s_reload < n_reloads; s_reload++)
432 if (rld[s_reload].secondary_p
433 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
434 || reg_class_subset_p (rld[s_reload].rclass, rclass))
435 && ((in_p && rld[s_reload].inmode == mode)
436 || (! in_p && rld[s_reload].outmode == mode))
437 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
438 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
439 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
440 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
441 && (small_register_class_p (rclass)
442 || targetm.small_register_classes_for_mode_p (VOIDmode))
443 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
444 opnum, rld[s_reload].opnum))
446 if (in_p)
447 rld[s_reload].inmode = mode;
448 if (! in_p)
449 rld[s_reload].outmode = mode;
451 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
452 rld[s_reload].rclass = rclass;
454 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
455 rld[s_reload].optional &= optional;
456 rld[s_reload].secondary_p = 1;
457 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
458 opnum, rld[s_reload].opnum))
459 rld[s_reload].when_needed = RELOAD_OTHER;
461 break;
464 if (s_reload == n_reloads)
466 #ifdef SECONDARY_MEMORY_NEEDED
467 /* If we need a memory location to copy between the two reload regs,
468 set it up now. Note that we do the input case before making
469 the reload and the output case after. This is due to the
470 way reloads are output. */
472 if (in_p && icode == CODE_FOR_nothing
473 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
475 get_secondary_mem (x, reload_mode, opnum, type);
477 /* We may have just added new reloads. Make sure we add
478 the new reload at the end. */
479 s_reload = n_reloads;
481 #endif
483 /* We need to make a new secondary reload for this register class. */
484 rld[s_reload].in = rld[s_reload].out = 0;
485 rld[s_reload].rclass = rclass;
487 rld[s_reload].inmode = in_p ? mode : VOIDmode;
488 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
489 rld[s_reload].reg_rtx = 0;
490 rld[s_reload].optional = optional;
491 rld[s_reload].inc = 0;
492 /* Maybe we could combine these, but it seems too tricky. */
493 rld[s_reload].nocombine = 1;
494 rld[s_reload].in_reg = 0;
495 rld[s_reload].out_reg = 0;
496 rld[s_reload].opnum = opnum;
497 rld[s_reload].when_needed = secondary_type;
498 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
499 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
500 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
501 rld[s_reload].secondary_out_icode
502 = ! in_p ? t_icode : CODE_FOR_nothing;
503 rld[s_reload].secondary_p = 1;
505 n_reloads++;
507 #ifdef SECONDARY_MEMORY_NEEDED
508 if (! in_p && icode == CODE_FOR_nothing
509 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
510 get_secondary_mem (x, mode, opnum, type);
511 #endif
514 *picode = icode;
515 return s_reload;
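/* Illustrative note: a secondary reload typically arises when the target
   cannot copy a value directly between X and a register of RELOAD_CLASS,
   i.e. when an intermediate register of another class, or a scratch
   register driven by a reload pattern (ICODE), is required.  The
   targetm.secondary_reload hook reports which of these applies, and the
   code above records the extra reloads, reusing an existing one when
   MERGABLE_RELOADS permits.  */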
518 /* If a secondary reload is needed, return its class. If both an intermediate
519 register and a scratch register are needed, we return the class of the
520 intermediate register. */
521 reg_class_t
522 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
523 rtx x)
525 enum insn_code icode;
526 secondary_reload_info sri;
528 sri.icode = CODE_FOR_nothing;
529 sri.prev_sri = NULL;
530 rclass
531 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
532 icode = (enum insn_code) sri.icode;
534 /* If there are no secondary reloads at all, we return NO_REGS.
535 If an intermediate register is needed, we return its class. */
536 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
537 return rclass;
539 /* No intermediate register is needed, but we have a special reload
540 pattern, which we assume for now needs a scratch register. */
541 return scratch_reload_class (icode);
544 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
545 three operands, verify that operand 2 is an output operand, and return
546 its register class.
547 ??? We'd like to be able to handle any pattern with at least 2 operands,
548 for zero or more scratch registers, but that needs more infrastructure. */
549 enum reg_class
550 scratch_reload_class (enum insn_code icode)
552 const char *scratch_constraint;
553 char scratch_letter;
554 enum reg_class rclass;
556 gcc_assert (insn_data[(int) icode].n_operands == 3);
557 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
558 gcc_assert (*scratch_constraint == '=');
559 scratch_constraint++;
560 if (*scratch_constraint == '&')
561 scratch_constraint++;
562 scratch_letter = *scratch_constraint;
563 if (scratch_letter == 'r')
564 return GENERAL_REGS;
565 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
566 scratch_constraint);
567 gcc_assert (rclass != NO_REGS);
568 return rclass;
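/* For instance, if the reload pattern's scratch operand (operand 2) has
   the constraint "=&r", the '=' and '&' are skipped above and GENERAL_REGS
   is returned; any other class letter is resolved through
   REG_CLASS_FROM_CONSTRAINT.  (Worked example of the function above.)  */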
571 #ifdef SECONDARY_MEMORY_NEEDED
573 /* Return a memory location that will be used to copy X in mode MODE.
574 If we haven't already made a location for this mode in this insn,
575 call find_reloads_address on the location being returned. */
577 rtx
578 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
579 int opnum, enum reload_type type)
581 rtx loc;
582 int mem_valid;
584 /* By default, if MODE is narrower than a word, widen it to a word.
585 This is required because most machines that require these memory
586 locations do not support short loads and stores from all registers
587 (e.g., FP registers). */
589 #ifdef SECONDARY_MEMORY_NEEDED_MODE
590 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
591 #else
592 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
593 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
594 #endif
596 /* If we already have made a MEM for this operand in MODE, return it. */
597 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
598 return secondary_memlocs_elim[(int) mode][opnum];
600 /* If this is the first time we've tried to get a MEM for this mode,
601 allocate a new one. `something_changed' in reload will get set
602 by noticing that the frame size has changed. */
604 if (secondary_memlocs[(int) mode] == 0)
606 #ifdef SECONDARY_MEMORY_NEEDED_RTX
607 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
608 #else
609 secondary_memlocs[(int) mode]
610 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
611 #endif
614 /* Get a version of the address doing any eliminations needed. If that
615 didn't give us a new MEM, make a new one if it isn't valid. */
617 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
618 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
619 MEM_ADDR_SPACE (loc));
621 if (! mem_valid && loc == secondary_memlocs[(int) mode])
622 loc = copy_rtx (loc);
624 /* The only time the call below will do anything is if the stack
625 offset is too large. In that case IND_LEVELS doesn't matter, so we
626 can just pass a zero. Adjust the type to be the address of the
627 corresponding object. If the address was valid, save the eliminated
628 address. If it wasn't valid, we need to make a reload each time, so
629 don't save it. */
631 if (! mem_valid)
633 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
634 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
635 : RELOAD_OTHER);
637 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
638 opnum, type, 0, 0);
641 secondary_memlocs_elim[(int) mode][opnum] = loc;
642 if (secondary_memlocs_elim_used <= (int)mode)
643 secondary_memlocs_elim_used = (int)mode + 1;
644 return loc;
647 /* Clear any secondary memory locations we've made. */
649 void
650 clear_secondary_mem (void)
652 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
654 #endif /* SECONDARY_MEMORY_NEEDED */
657 /* Find the largest class which has at least one register valid in
658 mode INNER, and which for every such register, that register number
659 plus N is also valid in OUTER (if in range) and is cheap to move
660 into REGNO. Such a class must exist. */
662 static enum reg_class
663 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
664 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
665 unsigned int dest_regno ATTRIBUTE_UNUSED)
667 int best_cost = -1;
668 int rclass;
669 int regno;
670 enum reg_class best_class = NO_REGS;
671 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
672 unsigned int best_size = 0;
673 int cost;
675 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
677 int bad = 0;
678 int good = 0;
679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
680 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
682 if (HARD_REGNO_MODE_OK (regno, inner))
684 good = 1;
685 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
686 || ! HARD_REGNO_MODE_OK (regno + n, outer))
687 bad = 1;
691 if (bad || !good)
692 continue;
693 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
695 if ((reg_class_size[rclass] > best_size
696 && (best_cost < 0 || best_cost >= cost))
697 || best_cost > cost)
699 best_class = (enum reg_class) rclass;
700 best_size = reg_class_size[rclass];
701 best_cost = register_move_cost (outer, (enum reg_class) rclass,
702 dest_class);
706 gcc_assert (best_size != 0);
708 return best_class;
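/* Usage note: in push_reload below, OUTER and INNER come from a
   (subreg:OUTER (reg:INNER R) ...) and N is the hard-register offset
   computed by subreg_regno_offset, so the class chosen here can hold the
   inner value at that offset while remaining cheap to move into R's
   class.  */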
711 /* We are trying to reload a subreg of something that is not a register.
712 Find the largest class which has at least one register valid in
713 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
714 which we would eventually like to obtain the object. */
716 static enum reg_class
717 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
718 enum machine_mode mode ATTRIBUTE_UNUSED,
719 enum reg_class dest_class ATTRIBUTE_UNUSED)
721 int best_cost = -1;
722 int rclass;
723 int regno;
724 enum reg_class best_class = NO_REGS;
725 unsigned int best_size = 0;
726 int cost;
728 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
730 int bad = 0;
731 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
732 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
733 && !HARD_REGNO_MODE_OK (regno, mode))
734 bad = 1;
736 if (bad)
737 continue;
739 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
741 if ((reg_class_size[rclass] > best_size
742 && (best_cost < 0 || best_cost >= cost))
743 || best_cost > cost)
745 best_class = (enum reg_class) rclass;
746 best_size = reg_class_size[rclass];
747 best_cost = register_move_cost (outer, (enum reg_class) rclass,
748 dest_class);
752 gcc_assert (best_size != 0);
754 #ifdef LIMIT_RELOAD_CLASS
755 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
756 #endif
757 return best_class;
760 /* Return the number of a previously made reload that can be combined with
761 a new one, or n_reloads if none of the existing reloads can be used.
762 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
763 push_reload; they determine the kind of the new reload that we try to
764 combine. P_IN points to the corresponding value of IN, which can be
765 modified by this function.
766 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
768 static int
769 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
770 enum reload_type type, int opnum, int dont_share)
772 rtx in = *p_in;
773 int i;
774 /* We can't merge two reloads if the output of either one is
775 earlyclobbered. */
777 if (earlyclobber_operand_p (out))
778 return n_reloads;
780 /* We can use an existing reload if the class is right
781 and at least one of IN and OUT is a match
782 and the other is at worst neutral.
783 (A zero compared against anything is neutral.)
785 For targets with small register classes, don't use existing reloads
786 unless they are for the same thing since that can cause us to need
787 more reload registers than we otherwise would. */
789 for (i = 0; i < n_reloads; i++)
790 if ((reg_class_subset_p (rclass, rld[i].rclass)
791 || reg_class_subset_p (rld[i].rclass, rclass))
792 /* If the existing reload has a register, it must fit our class. */
793 && (rld[i].reg_rtx == 0
794 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
795 true_regnum (rld[i].reg_rtx)))
796 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
797 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
798 || (out != 0 && MATCHES (rld[i].out, out)
799 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
800 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
801 && (small_register_class_p (rclass)
802 || targetm.small_register_classes_for_mode_p (VOIDmode))
803 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
804 return i;
806 /* Reloading a plain reg for input can match a reload to postincrement
807 that reg, since the postincrement's value is the right value.
808 Likewise, it can match a preincrement reload, since we regard
809 the preincrementation as happening before any ref in this insn
810 to that register. */
811 for (i = 0; i < n_reloads; i++)
812 if ((reg_class_subset_p (rclass, rld[i].rclass)
813 || reg_class_subset_p (rld[i].rclass, rclass))
814 /* If the existing reload has a register, it must fit our
815 class. */
816 && (rld[i].reg_rtx == 0
817 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
818 true_regnum (rld[i].reg_rtx)))
819 && out == 0 && rld[i].out == 0 && rld[i].in != 0
820 && ((REG_P (in)
821 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
822 && MATCHES (XEXP (rld[i].in, 0), in))
823 || (REG_P (rld[i].in)
824 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
825 && MATCHES (XEXP (in, 0), rld[i].in)))
826 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
827 && (small_register_class_p (rclass)
828 || targetm.small_register_classes_for_mode_p (VOIDmode))
829 && MERGABLE_RELOADS (type, rld[i].when_needed,
830 opnum, rld[i].opnum))
832 /* Make sure reload_in ultimately has the increment,
833 not the plain register. */
834 if (REG_P (in))
835 *p_in = rld[i].in;
836 return i;
838 return n_reloads;
841 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
842 expression. MODE is the mode that X will be used in. OUTPUT is true if
843 the function is invoked for the output part of an enclosing reload. */
845 static bool
846 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
848 rtx inner;
850 /* Only SUBREGs are problematical. */
851 if (GET_CODE (x) != SUBREG)
852 return false;
854 inner = SUBREG_REG (x);
856 /* If INNER is a constant or PLUS, then INNER will need reloading. */
857 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
858 return true;
860 /* If INNER is not a hard register, then INNER will not need reloading. */
861 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
862 return false;
864 /* If INNER is not ok for MODE, then INNER will need reloading. */
865 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
866 return true;
868 /* If this is for an output, and the outer part is a word or smaller,
869 INNER is larger than a word and the number of registers in INNER is
870 not the same as the number of words in INNER, then INNER will need
871 reloading (with an in-out reload). */
872 return (output
873 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
874 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
875 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
876 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
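/* Example (summarizing the checks above): for (subreg:M1 (reg:M2 R)) with
   R a hard register, the inner register needs reloading when M1 is not a
   valid mode for R, or, for an output, when M2 is wider than a word but
   occupies a different number of hard registers than it has words; in the
   latter case push_reload below generates an in-out reload of the inner
   register.  */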
879 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
880 requiring an extra reload register. The caller has already found that
881 IN contains some reference to REGNO, so check that we can produce the
882 new value in a single step. E.g. if we have
883 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
884 instruction that adds one to a register, this should succeed.
885 However, if we have something like
886 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
887 needs to be loaded into a register first, we need a separate reload
888 register.
889 Such PLUS reloads are generated by find_reloads_address_part.
890 The out-of-range PLUS expressions are usually introduced in the instruction
891 patterns by register elimination and substituting pseudos without a home
892 by their function-invariant equivalences. */
893 static int
894 can_reload_into (rtx in, int regno, enum machine_mode mode)
896 rtx dst, test_insn;
897 int r = 0;
898 struct recog_data save_recog_data;
900 /* For matching constraints, we often get notional input reloads where
901 we want to use the original register as the reload register. I.e.
902 technically this is a non-optional input-output reload, but IN is
903 already a valid register, and has been chosen as the reload register.
904 Speed this up, since it trivially works. */
905 if (REG_P (in))
906 return 1;
908 /* To test MEMs properly, we'd have to take into account all the reloads
909 that are already scheduled, which can become quite complicated.
910 And since we've already handled address reloads for this MEM, it
911 should always succeed anyway. */
912 if (MEM_P (in))
913 return 1;
915 /* If we can make a simple SET insn that does the job, everything should
916 be fine. */
917 dst = gen_rtx_REG (mode, regno);
918 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
919 save_recog_data = recog_data;
920 if (recog_memoized (test_insn) >= 0)
922 extract_insn (test_insn);
923 r = constrain_operands (1);
925 recog_data = save_recog_data;
926 return r;
929 /* Record one reload that needs to be performed.
930 IN is an rtx saying where the data are to be found before this instruction.
931 OUT says where they must be stored after the instruction.
932 (IN is zero for data not read, and OUT is zero for data not written.)
933 INLOC and OUTLOC point to the places in the instructions where
934 IN and OUT were found.
935 If IN and OUT are both nonzero, it means the same register must be used
936 to reload both IN and OUT.
938 RCLASS is a register class required for the reloaded data.
939 INMODE is the machine mode that the instruction requires
940 for the reg that replaces IN and OUTMODE is likewise for OUT.
942 If IN is zero, then OUT's location and mode should be passed as
943 INLOC and INMODE.
945 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
947 OPTIONAL nonzero means this reload does not need to be performed:
948 it can be discarded if that is more convenient.
950 OPNUM and TYPE say what the purpose of this reload is.
952 The return value is the reload-number for this reload.
954 If both IN and OUT are nonzero, in some rare cases we might
955 want to make two separate reloads. (Actually we never do this now.)
956 Therefore, the reload-number for OUT is stored in
957 output_reloadnum when we return; the return value applies to IN.
958 Usually (presently always), when IN and OUT are nonzero,
959 the two reload-numbers are equal, but the caller should be careful to
960 distinguish them. */
962 int
963 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
964 enum reg_class rclass, enum machine_mode inmode,
965 enum machine_mode outmode, int strict_low, int optional,
966 int opnum, enum reload_type type)
968 int i;
969 int dont_share = 0;
970 int dont_remove_subreg = 0;
971 #ifdef LIMIT_RELOAD_CLASS
972 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
973 #endif
974 int secondary_in_reload = -1, secondary_out_reload = -1;
975 enum insn_code secondary_in_icode = CODE_FOR_nothing;
976 enum insn_code secondary_out_icode = CODE_FOR_nothing;
977 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
978 subreg_in_class = NO_REGS;
980 /* INMODE and/or OUTMODE could be VOIDmode if no mode
981 has been specified for the operand. In that case,
982 use the operand's mode as the mode to reload. */
983 if (inmode == VOIDmode && in != 0)
984 inmode = GET_MODE (in);
985 if (outmode == VOIDmode && out != 0)
986 outmode = GET_MODE (out);
988 /* If find_reloads and friends have so far failed to replace a pseudo
989 with its reg_equiv_constant, something went wrong
990 beforehand.
991 Note that it can't simply be done here if we missed it earlier
992 since the constant might need to be pushed into the literal pool
993 and the resulting memref would probably need further
994 reloading. */
995 if (in != 0 && REG_P (in))
997 int regno = REGNO (in);
999 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1000 || reg_renumber[regno] >= 0
1001 || reg_equiv_constant (regno) == NULL_RTX);
1004 /* reg_equiv_constant only contains constants which are obviously
1005 not appropriate as a destination. So if we would need to replace
1006 the destination pseudo with a constant, we are in real
1007 trouble. */
1008 if (out != 0 && REG_P (out))
1010 int regno = REGNO (out);
1012 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1013 || reg_renumber[regno] >= 0
1014 || reg_equiv_constant (regno) == NULL_RTX);
1017 /* If we have a read-write operand with an address side-effect,
1018 change either IN or OUT so the side-effect happens only once. */
1019 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1020 switch (GET_CODE (XEXP (in, 0)))
1022 case POST_INC: case POST_DEC: case POST_MODIFY:
1023 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1024 break;
1026 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1027 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1028 break;
1030 default:
1031 break;
1034 /* If we are reloading a (SUBREG constant ...), really reload just the
1035 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1036 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1037 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1038 register is a pseudo, also reload the inside expression.
1039 For machines that extend byte loads, do this for any SUBREG of a pseudo
1040 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1041 M2 is an integral mode that gets extended when loaded.
1042 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1043 where either M1 is not valid for R or M2 is wider than a word but we
1044 only need one register to store an M2-sized quantity in R.
1045 (However, if OUT is nonzero, we need to reload the reg *and*
1046 the subreg, so do nothing here, and let following statement handle it.)
1048 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1049 we can't handle it here because CONST_INT does not indicate a mode.
1051 Similarly, we must reload the inside expression if we have a
1052 STRICT_LOW_PART (presumably, in == out in this case).
1054 Also reload the inner expression if it does not require a secondary
1055 reload but the SUBREG does.
1057 Finally, reload the inner expression if it is a register that is in
1058 the class whose registers cannot be referenced in a different size
1059 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1060 cannot reload just the inside since we might end up with the wrong
1061 register class. But if it is inside a STRICT_LOW_PART, we have
1062 no choice, so we hope we do get the right register class there. */
1064 if (in != 0 && GET_CODE (in) == SUBREG
1065 && (subreg_lowpart_p (in) || strict_low)
1066 #ifdef CANNOT_CHANGE_MODE_CLASS
1067 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1068 #endif
1069 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1070 && (CONSTANT_P (SUBREG_REG (in))
1071 || GET_CODE (SUBREG_REG (in)) == PLUS
1072 || strict_low
1073 || (((REG_P (SUBREG_REG (in))
1074 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1075 || MEM_P (SUBREG_REG (in)))
1076 && ((GET_MODE_PRECISION (inmode)
1077 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1078 #ifdef LOAD_EXTEND_OP
1079 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1080 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1081 <= UNITS_PER_WORD)
1082 && (GET_MODE_PRECISION (inmode)
1083 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1084 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1085 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1086 #endif
1087 #ifdef WORD_REGISTER_OPERATIONS
1088 || ((GET_MODE_PRECISION (inmode)
1089 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1090 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1091 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1092 / UNITS_PER_WORD)))
1093 #endif
1095 || (REG_P (SUBREG_REG (in))
1096 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1097 /* The case where out is nonzero
1098 is handled differently in the following statement. */
1099 && (out == 0 || subreg_lowpart_p (in))
1100 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1101 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1102 > UNITS_PER_WORD)
1103 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1104 / UNITS_PER_WORD)
1105 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1106 [GET_MODE (SUBREG_REG (in))]))
1107 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1108 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1109 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1110 SUBREG_REG (in))
1111 == NO_REGS))
1112 #ifdef CANNOT_CHANGE_MODE_CLASS
1113 || (REG_P (SUBREG_REG (in))
1114 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1115 && REG_CANNOT_CHANGE_MODE_P
1116 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1117 #endif
1120 #ifdef LIMIT_RELOAD_CLASS
1121 in_subreg_loc = inloc;
1122 #endif
1123 inloc = &SUBREG_REG (in);
1124 in = *inloc;
1125 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1126 if (MEM_P (in))
1127 /* This is supposed to happen only for paradoxical subregs made by
1128 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1129 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1130 #endif
1131 inmode = GET_MODE (in);
1134 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1135 where M1 is not valid for R if it was not handled by the code above.
1137 Similar issue for (SUBREG constant ...) if it was not handled by the
1138 code above. This can happen if SUBREG_BYTE != 0.
1140 However, we must reload the inner reg *as well as* the subreg in
1141 that case. */
1143 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1145 if (REG_P (SUBREG_REG (in)))
1146 subreg_in_class
1147 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1148 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1149 GET_MODE (SUBREG_REG (in)),
1150 SUBREG_BYTE (in),
1151 GET_MODE (in)),
1152 REGNO (SUBREG_REG (in)));
1153 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1154 subreg_in_class = find_valid_class_1 (inmode,
1155 GET_MODE (SUBREG_REG (in)),
1156 rclass);
1158 /* This relies on the fact that emit_reload_insns outputs the
1159 instructions for input reloads of type RELOAD_OTHER in the same
1160 order as the reloads. Thus if the outer reload is also of type
1161 RELOAD_OTHER, we are guaranteed that this inner reload will be
1162 output before the outer reload. */
1163 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1164 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1165 dont_remove_subreg = 1;
1168 /* Similarly for paradoxical and problematical SUBREGs on the output.
1169 Note that there is no reason we need worry about the previous value
1170 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1171 entitled to clobber it all (except in the case of a word mode subreg
1172 or of a STRICT_LOW_PART, in that latter case the constraint should
1173 label it input-output.) */
1174 if (out != 0 && GET_CODE (out) == SUBREG
1175 && (subreg_lowpart_p (out) || strict_low)
1176 #ifdef CANNOT_CHANGE_MODE_CLASS
1177 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1178 #endif
1179 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1180 && (CONSTANT_P (SUBREG_REG (out))
1181 || strict_low
1182 || (((REG_P (SUBREG_REG (out))
1183 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1184 || MEM_P (SUBREG_REG (out)))
1185 && ((GET_MODE_PRECISION (outmode)
1186 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1187 #ifdef WORD_REGISTER_OPERATIONS
1188 || ((GET_MODE_PRECISION (outmode)
1189 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1190 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1191 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1192 / UNITS_PER_WORD)))
1193 #endif
1195 || (REG_P (SUBREG_REG (out))
1196 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1197 /* The case of a word mode subreg
1198 is handled differently in the following statement. */
1199 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1200 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1201 > UNITS_PER_WORD))
1202 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1203 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1204 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1205 SUBREG_REG (out))
1206 == NO_REGS))
1207 #ifdef CANNOT_CHANGE_MODE_CLASS
1208 || (REG_P (SUBREG_REG (out))
1209 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1210 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1211 GET_MODE (SUBREG_REG (out)),
1212 outmode))
1213 #endif
1216 #ifdef LIMIT_RELOAD_CLASS
1217 out_subreg_loc = outloc;
1218 #endif
1219 outloc = &SUBREG_REG (out);
1220 out = *outloc;
1221 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1222 gcc_assert (!MEM_P (out)
1223 || GET_MODE_SIZE (GET_MODE (out))
1224 <= GET_MODE_SIZE (outmode));
1225 #endif
1226 outmode = GET_MODE (out);
1229 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1230 where either M1 is not valid for R or M2 is wider than a word but we
1231 only need one register to store an M2-sized quantity in R.
1233 However, we must reload the inner reg *as well as* the subreg in
1234 that case and the inner reg is an in-out reload. */
1236 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1238 enum reg_class in_out_class
1239 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1240 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1241 GET_MODE (SUBREG_REG (out)),
1242 SUBREG_BYTE (out),
1243 GET_MODE (out)),
1244 REGNO (SUBREG_REG (out)));
1246 /* This relies on the fact that emit_reload_insns outputs the
1247 instructions for output reloads of type RELOAD_OTHER in reverse
1248 order of the reloads. Thus if the outer reload is also of type
1249 RELOAD_OTHER, we are guaranteed that this inner reload will be
1250 output after the outer reload. */
1251 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1252 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1253 0, 0, opnum, RELOAD_OTHER);
1254 dont_remove_subreg = 1;
1257 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1258 if (in != 0 && out != 0 && MEM_P (out)
1259 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1260 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1261 dont_share = 1;
1263 /* If IN is a SUBREG of a hard register, make a new REG. This
1264 simplifies some of the cases below. */
1266 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1267 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1268 && ! dont_remove_subreg)
1269 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1271 /* Similarly for OUT. */
1272 if (out != 0 && GET_CODE (out) == SUBREG
1273 && REG_P (SUBREG_REG (out))
1274 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1275 && ! dont_remove_subreg)
1276 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1278 /* Narrow down the class of register wanted if that is
1279 desirable on this machine for efficiency. */
1281 reg_class_t preferred_class = rclass;
1283 if (in != 0)
1284 preferred_class = targetm.preferred_reload_class (in, rclass);
1286 /* Output reloads may need analogous treatment, different in detail. */
1287 if (out != 0)
1288 preferred_class
1289 = targetm.preferred_output_reload_class (out, preferred_class);
1291 /* Discard what the target said if we cannot do it. */
1292 if (preferred_class != NO_REGS
1293 || (optional && type == RELOAD_FOR_OUTPUT))
1294 rclass = (enum reg_class) preferred_class;
1297 /* Make sure we use a class that can handle the actual pseudo
1298 inside any subreg. For example, on the 386, QImode regs
1299 can appear within SImode subregs. Although GENERAL_REGS
1300 can handle SImode, QImode needs a smaller class. */
1301 #ifdef LIMIT_RELOAD_CLASS
1302 if (in_subreg_loc)
1303 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1304 else if (in != 0 && GET_CODE (in) == SUBREG)
1305 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1307 if (out_subreg_loc)
1308 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1309 if (out != 0 && GET_CODE (out) == SUBREG)
1310 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1311 #endif
1313 /* Verify that this class is at least possible for the mode that
1314 is specified. */
1315 if (this_insn_is_asm)
1317 enum machine_mode mode;
1318 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1319 mode = inmode;
1320 else
1321 mode = outmode;
1322 if (mode == VOIDmode)
1324 error_for_asm (this_insn, "cannot reload integer constant "
1325 "operand in %<asm%>");
1326 mode = word_mode;
1327 if (in != 0)
1328 inmode = word_mode;
1329 if (out != 0)
1330 outmode = word_mode;
1332 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1333 if (HARD_REGNO_MODE_OK (i, mode)
1334 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1335 break;
1336 if (i == FIRST_PSEUDO_REGISTER)
1338 error_for_asm (this_insn, "impossible register constraint "
1339 "in %<asm%>");
1340 /* Avoid further trouble with this insn. */
1341 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1342 /* We used to continue here setting class to ALL_REGS, but it triggers
1343 a sanity check on i386 for:
1344 void foo(long double d)
1346 asm("" :: "a" (d));
1348 Returning zero here ought to be safe as we take care in
1349 find_reloads to not process the reloads when instruction was
1350 replaced by USE. */
1352 return 0;
1356 /* Optional output reloads are always OK even if we have no register class,
1357 since the function of these reloads is only to have spill_reg_store etc.
1358 set, so that the storing insn can be deleted later. */
1359 gcc_assert (rclass != NO_REGS
1360 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1362 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1364 if (i == n_reloads)
1366 /* See if we need a secondary reload register to move between CLASS
1367 and IN or CLASS and OUT. Get the icode and push any required reloads
1368 needed for each of them if so. */
1370 if (in != 0)
1371 secondary_in_reload
1372 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1373 &secondary_in_icode, NULL);
1374 if (out != 0 && GET_CODE (out) != SCRATCH)
1375 secondary_out_reload
1376 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1377 type, &secondary_out_icode, NULL);
1379 /* We found no existing reload suitable for re-use.
1380 So add an additional reload. */
1382 #ifdef SECONDARY_MEMORY_NEEDED
1383 if (subreg_in_class == NO_REGS
1384 && in != 0
1385 && (REG_P (in)
1386 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1387 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1388 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1389 /* If a memory location is needed for the copy, make one. */
1390 if (subreg_in_class != NO_REGS
1391 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1392 get_secondary_mem (in, inmode, opnum, type);
1393 #endif
1395 i = n_reloads;
1396 rld[i].in = in;
1397 rld[i].out = out;
1398 rld[i].rclass = rclass;
1399 rld[i].inmode = inmode;
1400 rld[i].outmode = outmode;
1401 rld[i].reg_rtx = 0;
1402 rld[i].optional = optional;
1403 rld[i].inc = 0;
1404 rld[i].nocombine = 0;
1405 rld[i].in_reg = inloc ? *inloc : 0;
1406 rld[i].out_reg = outloc ? *outloc : 0;
1407 rld[i].opnum = opnum;
1408 rld[i].when_needed = type;
1409 rld[i].secondary_in_reload = secondary_in_reload;
1410 rld[i].secondary_out_reload = secondary_out_reload;
1411 rld[i].secondary_in_icode = secondary_in_icode;
1412 rld[i].secondary_out_icode = secondary_out_icode;
1413 rld[i].secondary_p = 0;
1415 n_reloads++;
1417 #ifdef SECONDARY_MEMORY_NEEDED
1418 if (out != 0
1419 && (REG_P (out)
1420 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1421 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1422 && SECONDARY_MEMORY_NEEDED (rclass,
1423 REGNO_REG_CLASS (reg_or_subregno (out)),
1424 outmode))
1425 get_secondary_mem (out, outmode, opnum, type);
1426 #endif
1428 else
1430 /* We are reusing an existing reload,
1431 but we may have additional information for it.
1432 For example, we may now have both IN and OUT
1433 while the old one may have just one of them. */
1435 /* The modes can be different. If they are, we want to reload in
1436 the larger mode, so that the value is valid for both modes. */
1437 if (inmode != VOIDmode
1438 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1439 rld[i].inmode = inmode;
1440 if (outmode != VOIDmode
1441 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1442 rld[i].outmode = outmode;
1443 if (in != 0)
1445 rtx in_reg = inloc ? *inloc : 0;
1446 /* If we merge reloads for two distinct rtl expressions that
1447 are identical in content, there might be duplicate address
1448 reloads. Remove the extra set now, so that if we later find
1449 that we can inherit this reload, we can get rid of the
1450 address reloads altogether.
1452 Do not do this if both reloads are optional since the result
1453 would be an optional reload which could potentially leave
1454 unresolved address replacements.
1456 It is not sufficient to call transfer_replacements since
1457 choose_reload_regs will remove the replacements for address
1458 reloads of inherited reloads which results in the same
1459 problem. */
1460 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1461 && ! (rld[i].optional && optional))
1463 /* We must keep the address reload with the lower operand
1464 number alive. */
1465 if (opnum > rld[i].opnum)
1467 remove_address_replacements (in);
1468 in = rld[i].in;
1469 in_reg = rld[i].in_reg;
1471 else
1472 remove_address_replacements (rld[i].in);
1474 /* When emitting reloads we don't necessarily look at the in-
1475 and outmode, but also directly at the operands (in and out).
1476 So we can't simply overwrite them with whatever we have found
1477 for this (to-be-merged) reload, we have to "merge" that too.
1478 Reusing another reload already verified that we deal with the
1479 same operands, just possibly in different modes. So we
1480 overwrite the operands only when the new mode is larger.
1481 See also PR33613. */
1482 if (!rld[i].in
1483 || GET_MODE_SIZE (GET_MODE (in))
1484 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1485 rld[i].in = in;
1486 if (!rld[i].in_reg
1487 || (in_reg
1488 && GET_MODE_SIZE (GET_MODE (in_reg))
1489 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1490 rld[i].in_reg = in_reg;
1492 if (out != 0)
1494 if (!rld[i].out
1495 || (out
1496 && GET_MODE_SIZE (GET_MODE (out))
1497 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1498 rld[i].out = out;
1499 if (outloc
1500 && (!rld[i].out_reg
1501 || GET_MODE_SIZE (GET_MODE (*outloc))
1502 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1503 rld[i].out_reg = *outloc;
1505 if (reg_class_subset_p (rclass, rld[i].rclass))
1506 rld[i].rclass = rclass;
1507 rld[i].optional &= optional;
1508 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1509 opnum, rld[i].opnum))
1510 rld[i].when_needed = RELOAD_OTHER;
1511 rld[i].opnum = MIN (rld[i].opnum, opnum);
1514 /* If the ostensible rtx being reloaded differs from the rtx found
1515 in the location to substitute, this reload is not safe to combine
1516 because we cannot reliably tell whether it appears in the insn. */
1518 if (in != 0 && in != *inloc)
1519 rld[i].nocombine = 1;
1521 #if 0
1522 /* This was replaced by changes in find_reloads_address_1 and the new
1523 function inc_for_reload, which go with a new meaning of reload_inc. */
1525 /* If this is an IN/OUT reload in an insn that sets the CC,
1526 it must be for an autoincrement. It doesn't work to store
1527 the incremented value after the insn because that would clobber the CC.
1528 So we must do the increment of the value reloaded from,
1529 increment it, store it back, then decrement again. */
1530 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1532 out = 0;
1533 rld[i].out = 0;
1534 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1535 /* If we did not find a nonzero amount-to-increment-by,
1536 that contradicts the belief that IN is being incremented
1537 in an address in this insn. */
1538 gcc_assert (rld[i].inc != 0);
1540 #endif
1542 /* If we will replace IN and OUT with the reload-reg,
1543 record where they are located so that substitution need
1544 not do a tree walk. */
1546 if (replace_reloads)
1548 if (inloc != 0)
1550 struct replacement *r = &replacements[n_replacements++];
1551 r->what = i;
1552 r->where = inloc;
1553 r->mode = inmode;
1555 if (outloc != 0 && outloc != inloc)
1557 struct replacement *r = &replacements[n_replacements++];
1558 r->what = i;
1559 r->where = outloc;
1560 r->mode = outmode;
1564 /* If this reload is just being introduced and it has both
1565 an incoming quantity and an outgoing quantity that are
1566 supposed to be made to match, see if either one of the two
1567 can serve as the place to reload into.
1569 If one of them is acceptable, set rld[i].reg_rtx
1570 to that one. */
1572 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1574 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1575 inmode, outmode,
1576 rld[i].rclass, i,
1577 earlyclobber_operand_p (out));
1579 /* If the outgoing register already contains the same value
1580 as the incoming one, we can dispense with loading it.
1581 The easiest way to tell the caller that is to give a phony
1582 value for the incoming operand (same as outgoing one). */
1583 if (rld[i].reg_rtx == out
1584 && (REG_P (in) || CONSTANT_P (in))
1585 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1586 static_reload_reg_p, i, inmode))
1587 rld[i].in = out;
1590 /* If this is an input reload and the operand contains a register that
1591 dies in this insn and is used nowhere else, see if it is the right class
1592 to be used for this reload. Use it if so. (This occurs most commonly
1593 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1594 this if it is also an output reload that mentions the register unless
1595 the output is a SUBREG that clobbers an entire register.
1597 Note that the operand might be one of the spill regs, if it is a
1598 pseudo reg and we are in a block where spilling has not taken place.
1599 But if there is no spilling in this block, that is OK.
1600 An explicitly used hard reg cannot be a spill reg. */
1602 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1604 rtx note;
1605 int regno;
1606 enum machine_mode rel_mode = inmode;
1608 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1609 rel_mode = outmode;
1611 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1612 if (REG_NOTE_KIND (note) == REG_DEAD
1613 && REG_P (XEXP (note, 0))
1614 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1615 && reg_mentioned_p (XEXP (note, 0), in)
1616 /* Check that a former pseudo is valid; see find_dummy_reload. */
1617 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1618 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1619 ORIGINAL_REGNO (XEXP (note, 0)))
1620 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1621 && ! refers_to_regno_for_reload_p (regno,
1622 end_hard_regno (rel_mode,
1623 regno),
1624 PATTERN (this_insn), inloc)
1625 /* If this is also an output reload, IN cannot be used as
1626 the reload register if it is set in this insn unless IN
1627 is also OUT. */
1628 && (out == 0 || in == out
1629 || ! hard_reg_set_here_p (regno,
1630 end_hard_regno (rel_mode, regno),
1631 PATTERN (this_insn)))
1632 /* ??? Why is this code so different from the previous?
1633 Is there any simple coherent way to describe the two together?
1634 What's going on here? */
1635 && (in != out
1636 || (GET_CODE (in) == SUBREG
1637 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1638 / UNITS_PER_WORD)
1639 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1640 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1641 /* Make sure the operand fits in the reg that dies. */
1642 && (GET_MODE_SIZE (rel_mode)
1643 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1644 && HARD_REGNO_MODE_OK (regno, inmode)
1645 && HARD_REGNO_MODE_OK (regno, outmode))
1647 unsigned int offs;
1648 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1649 hard_regno_nregs[regno][outmode]);
1651 for (offs = 0; offs < nregs; offs++)
1652 if (fixed_regs[regno + offs]
1653 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1654 regno + offs))
1655 break;
1657 if (offs == nregs
1658 && (! (refers_to_regno_for_reload_p
1659 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1660 || can_reload_into (in, regno, inmode)))
1662 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1663 break;
1668 if (out)
1669 output_reloadnum = i;
1671 return i;
1674 /* Record an additional place we must replace a value
1675 for which we have already recorded a reload.
1676 RELOADNUM is the value returned by push_reload
1677 when the reload was recorded.
1678 This is used in insn patterns that use match_dup. */
1680 static void
1681 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1683 if (replace_reloads)
1685 struct replacement *r = &replacements[n_replacements++];
1686 r->what = reloadnum;
1687 r->where = loc;
1688 r->mode = mode;
1692 /* Duplicate any replacement we have recorded to apply at
1693 location ORIG_LOC to also be performed at DUP_LOC.
1694 This is used in insn patterns that use match_dup. */
1696 static void
1697 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1699 int i, n = n_replacements;
1701 for (i = 0; i < n; i++)
1703 struct replacement *r = &replacements[i];
1704 if (r->where == orig_loc)
1705 push_replacement (dup_loc, r->what, r->mode);
1709 /* Transfer all replacements that used to be in reload FROM to be in
1710 reload TO. */
1712 void
1713 transfer_replacements (int to, int from)
1715 int i;
1717 for (i = 0; i < n_replacements; i++)
1718 if (replacements[i].what == from)
1719 replacements[i].what = to;
1722 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1723 or a subpart of it. If we have any replacements registered for IN_RTX,
1724 cancel the reloads that were supposed to load them.
1725 Return nonzero if we canceled any reloads. */
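/* Sketch of the flag encoding used below: reload_flags[R] accumulates 1
   when a replacement belonging to reload R lies inside IN_RTX and 2 when
   one lies outside it, so only reloads whose replacements all lie inside
   IN_RTX (flags == 1) are deallocated and have their own address reloads
   cancelled recursively.  */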
1727 remove_address_replacements (rtx in_rtx)
1729 int i, j;
1730 char reload_flags[MAX_RELOADS];
1731 int something_changed = 0;
1733 memset (reload_flags, 0, sizeof reload_flags);
1734 for (i = 0, j = 0; i < n_replacements; i++)
1736 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1737 reload_flags[replacements[i].what] |= 1;
1738 else
1740 replacements[j++] = replacements[i];
1741 reload_flags[replacements[i].what] |= 2;
1744 /* Note that the following store must be done before the recursive calls. */
1745 n_replacements = j;
1747 for (i = n_reloads - 1; i >= 0; i--)
1749 if (reload_flags[i] == 1)
1751 deallocate_reload_reg (i);
1752 remove_address_replacements (rld[i].in);
1753 rld[i].in = 0;
1754 something_changed = 1;
1757 return something_changed;
1760 /* If there is only one output reload, and it is not for an earlyclobber
1761 operand, try to combine it with a (logically unrelated) input reload
1762 to reduce the number of reload registers needed.
1764 This is safe if the input reload does not appear in
1765 the value being output-reloaded, because this implies
1766 it is not needed any more once the original insn completes.
1768 If that doesn't work, see if we can use any of the registers that
1769 die in this insn as a reload register. We can if it is of the right
1770 class and does not appear in the value being output-reloaded. */
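/* Example (operand and register numbers are arbitrary): given an input
   reload of (reg 100) for operand 1 and an output reload of (reg 101) for
   operand 0, where the classes are compatible and (reg 100) does not occur
   in the value stored into (reg 101), the output is attached to the input
   reload, which then becomes RELOAD_OTHER, so a single reload register
   serves both.  */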
1772 static void
1773 combine_reloads (void)
1775 int i, regno;
1776 int output_reload = -1;
1777 int secondary_out = -1;
1778 rtx note;
1780 /* Find the output reload; return unless there is exactly one
1781 and that one is mandatory. */
1783 for (i = 0; i < n_reloads; i++)
1784 if (rld[i].out != 0)
1786 if (output_reload >= 0)
1787 return;
1788 output_reload = i;
1791 if (output_reload < 0 || rld[output_reload].optional)
1792 return;
1794 /* An input-output reload isn't combinable. */
1796 if (rld[output_reload].in != 0)
1797 return;
1799 /* If this reload is for an earlyclobber operand, we can't do anything. */
1800 if (earlyclobber_operand_p (rld[output_reload].out))
1801 return;
1803 /* If there is a reload for part of the address of this operand, we would
1804 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1805 its life to the point where doing this combine would not lower the
1806 number of spill registers needed. */
1807 for (i = 0; i < n_reloads; i++)
1808 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1809 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1810 && rld[i].opnum == rld[output_reload].opnum)
1811 return;
1813 /* Check each input reload; can we combine it? */
1815 for (i = 0; i < n_reloads; i++)
1816 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1817 /* Life span of this reload must not extend past main insn. */
1818 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1819 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1820 && rld[i].when_needed != RELOAD_OTHER
1821 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1822 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1823 [(int) rld[output_reload].outmode])
1824 && rld[i].inc == 0
1825 && rld[i].reg_rtx == 0
1826 #ifdef SECONDARY_MEMORY_NEEDED
1827 /* Don't combine two reloads with different secondary
1828 memory locations. */
1829 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1830 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1831 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1832 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1833 #endif
1834 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1835 ? (rld[i].rclass == rld[output_reload].rclass)
1836 : (reg_class_subset_p (rld[i].rclass,
1837 rld[output_reload].rclass)
1838 || reg_class_subset_p (rld[output_reload].rclass,
1839 rld[i].rclass)))
1840 && (MATCHES (rld[i].in, rld[output_reload].out)
1841 /* Args reversed because the first arg seems to be
1842 the one that we imagine being modified
1843 while the second is the one that might be affected. */
1844 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1845 rld[i].in)
1846 /* However, if the input is a register that appears inside
1847 the output, then we also can't share.
1848 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1849 If the same reload reg is used for both reg 69 and the
1850 result to be stored in memory, then that result
1851 will clobber the address of the memory ref. */
1852 && ! (REG_P (rld[i].in)
1853 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1854 rld[output_reload].out))))
1855 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1856 rld[i].when_needed != RELOAD_FOR_INPUT)
1857 && (reg_class_size[(int) rld[i].rclass]
1858 || targetm.small_register_classes_for_mode_p (VOIDmode))
1859 /* We will allow making things slightly worse by combining an
1860 input and an output, but no worse than that. */
1861 && (rld[i].when_needed == RELOAD_FOR_INPUT
1862 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1864 int j;
1866 /* We have found a reload to combine with! */
1867 rld[i].out = rld[output_reload].out;
1868 rld[i].out_reg = rld[output_reload].out_reg;
1869 rld[i].outmode = rld[output_reload].outmode;
1870 /* Mark the old output reload as inoperative. */
1871 rld[output_reload].out = 0;
1872 /* The combined reload is needed for the entire insn. */
1873 rld[i].when_needed = RELOAD_OTHER;
1874 /* If the output reload had a secondary reload, copy it. */
1875 if (rld[output_reload].secondary_out_reload != -1)
1877 rld[i].secondary_out_reload
1878 = rld[output_reload].secondary_out_reload;
1879 rld[i].secondary_out_icode
1880 = rld[output_reload].secondary_out_icode;
1883 #ifdef SECONDARY_MEMORY_NEEDED
1884 /* Copy any secondary MEM. */
1885 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1886 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1887 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1888 #endif
1889 /* If required, minimize the register class. */
1890 if (reg_class_subset_p (rld[output_reload].rclass,
1891 rld[i].rclass))
1892 rld[i].rclass = rld[output_reload].rclass;
1894 /* Transfer all replacements from the old reload to the combined. */
1895 for (j = 0; j < n_replacements; j++)
1896 if (replacements[j].what == output_reload)
1897 replacements[j].what = i;
1899 return;
1902 /* If this insn has only one operand that is modified or written (assumed
1903 to be the first), it must be the one corresponding to this reload. It
1904 is safe to use anything that dies in this insn for that output provided
1905 that it does not occur in the output (we already know it isn't an
1906 earlyclobber). If this is an asm insn, give up. */
1908 if (INSN_CODE (this_insn) == -1)
1909 return;
1911 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1912 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1913 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1914 return;
1916 /* See if some hard register that dies in this insn and is not used in
1917 the output is the right class. Only works if the register we pick
1918 up can fully hold our output reload. */
1919 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1920 if (REG_NOTE_KIND (note) == REG_DEAD
1921 && REG_P (XEXP (note, 0))
1922 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1923 rld[output_reload].out)
1924 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1925 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1926 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1927 regno)
1928 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1929 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1930 /* Ensure that a secondary or tertiary reload for this output
1931 won't want this register. */
1932 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1933 || (!(TEST_HARD_REG_BIT
1934 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1935 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1936 || !(TEST_HARD_REG_BIT
1937 (reg_class_contents[(int) rld[secondary_out].rclass],
1938 regno)))))
1939 && !fixed_regs[regno]
1940 /* Check that a former pseudo is valid; see find_dummy_reload. */
1941 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1942 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1943 ORIGINAL_REGNO (XEXP (note, 0)))
1944 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1946 rld[output_reload].reg_rtx
1947 = gen_rtx_REG (rld[output_reload].outmode, regno);
1948 return;
1952 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1953 See if one of IN and OUT is a register that may be used;
1954 this is desirable since a spill-register won't be needed.
1955 If so, return the register rtx that proves acceptable.
1957 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1958 RCLASS is the register class required for the reload.
1960 If FOR_REAL is >= 0, it is the number of the reload,
1961 and in some cases when it can be discovered that OUT doesn't need
1962 to be computed, clear out rld[FOR_REAL].out.
1964 If FOR_REAL is -1, this should not be done, because this call
1965 is just to see if a register can be found, not to find and install it.
1967 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1968 puts an additional constraint on being able to use IN for OUT since
1969 IN must not appear elsewhere in the insn (it is assumed that IN itself
1970 is safe from the earlyclobber). */
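/* For instance (arbitrary registers and modes): with IN = (reg:SI 100),
   OUT = (reg:SI 3) and the insn
   (set (reg:SI 3) (plus:SI (reg:SI 100) (const_int 1))), if hard reg 3
   belongs to RCLASS, is not fixed and is not referenced elsewhere in the
   insn, OUT itself can be returned and no spill register is needed.  */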
1972 static rtx
1973 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1974 enum machine_mode inmode, enum machine_mode outmode,
1975 reg_class_t rclass, int for_real, int earlyclobber)
1977 rtx in = real_in;
1978 rtx out = real_out;
1979 int in_offset = 0;
1980 int out_offset = 0;
1981 rtx value = 0;
1983 /* If operands exceed a word, we can't use either of them
1984 unless they have the same size. */
1985 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1986 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1987 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1988 return 0;
1990 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1991 respectively refers to a hard register. */
1993 /* Find the inside of any subregs. */
1994 while (GET_CODE (out) == SUBREG)
1996 if (REG_P (SUBREG_REG (out))
1997 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1998 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1999 GET_MODE (SUBREG_REG (out)),
2000 SUBREG_BYTE (out),
2001 GET_MODE (out));
2002 out = SUBREG_REG (out);
2004 while (GET_CODE (in) == SUBREG)
2006 if (REG_P (SUBREG_REG (in))
2007 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2008 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2009 GET_MODE (SUBREG_REG (in)),
2010 SUBREG_BYTE (in),
2011 GET_MODE (in));
2012 in = SUBREG_REG (in);
2015 /* Narrow down the reg class, the same way push_reload will;
2016 otherwise we might find a dummy now, but push_reload won't. */
2018 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2019 if (preferred_class != NO_REGS)
2020 rclass = (enum reg_class) preferred_class;
2023 /* See if OUT will do. */
2024 if (REG_P (out)
2025 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2027 unsigned int regno = REGNO (out) + out_offset;
2028 unsigned int nwords = hard_regno_nregs[regno][outmode];
2029 rtx saved_rtx;
2031 /* When we consider whether the insn uses OUT,
2032 ignore references within IN. They don't prevent us
2033 from copying IN into OUT, because those refs would
2034 move into the insn that reloads IN.
2036 However, we only ignore IN in its role as this reload.
2037 If the insn uses IN elsewhere and it contains OUT,
2038 that counts. We can't be sure it's the "same" operand
2039 so it might not go through this reload.
2041 We also need to avoid using OUT if it, or part of it, is a
2042 fixed register. Modifying such registers, even transiently,
2043 may have undefined effects on the machine, such as modifying
2044 the stack pointer. */
2045 saved_rtx = *inloc;
2046 *inloc = const0_rtx;
2048 if (regno < FIRST_PSEUDO_REGISTER
2049 && HARD_REGNO_MODE_OK (regno, outmode)
2050 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2051 PATTERN (this_insn), outloc))
2053 unsigned int i;
2055 for (i = 0; i < nwords; i++)
2056 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2057 regno + i)
2058 || fixed_regs[regno + i])
2059 break;
2061 if (i == nwords)
2063 if (REG_P (real_out))
2064 value = real_out;
2065 else
2066 value = gen_rtx_REG (outmode, regno);
2070 *inloc = saved_rtx;
2073 /* Consider using IN if OUT was not acceptable
2074 or if OUT dies in this insn (like the quotient in a divmod insn).
2075 We can't use IN unless it dies in this insn,
2076 which means we must know accurately which hard regs are live.
2077 Also, the result can't go in IN if IN is used within OUT,
2078 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2079 if (hard_regs_live_known
2080 && REG_P (in)
2081 && REGNO (in) < FIRST_PSEUDO_REGISTER
2082 && (value == 0
2083 || find_reg_note (this_insn, REG_UNUSED, real_out))
2084 && find_reg_note (this_insn, REG_DEAD, real_in)
2085 && !fixed_regs[REGNO (in)]
2086 && HARD_REGNO_MODE_OK (REGNO (in),
2087 /* The only case where out and real_out might
2088 have different modes is where real_out
2089 is a subreg, and in that case, out
2090 has a real mode. */
2091 (GET_MODE (out) != VOIDmode
2092 ? GET_MODE (out) : outmode))
2093 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2094 /* However only do this if we can be sure that this input
2095 operand doesn't correspond with an uninitialized pseudo.
2096 global can assign some hardreg to it that is the same as
2097 the one assigned to a different, also live pseudo (as it
2098 can ignore the conflict). We must never introduce writes
2099 to such hardregs, as they would clobber the other live
2100 pseudo. See PR 20973. */
2101 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2102 ORIGINAL_REGNO (in))
2103 /* Similarly, only do this if we can be sure that the death
2104 note is still valid. global can assign some hardreg to
2105 the pseudo referenced in the note and simultaneously a
2106 subword of this hardreg to a different, also live pseudo,
2107 because only another subword of the hardreg is actually
2108 used in the insn. This cannot happen if the pseudo has
2109 been assigned exactly one hardreg. See PR 33732. */
2110 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2112 unsigned int regno = REGNO (in) + in_offset;
2113 unsigned int nwords = hard_regno_nregs[regno][inmode];
2115 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2116 && ! hard_reg_set_here_p (regno, regno + nwords,
2117 PATTERN (this_insn))
2118 && (! earlyclobber
2119 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2120 PATTERN (this_insn), inloc)))
2122 unsigned int i;
2124 for (i = 0; i < nwords; i++)
2125 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2126 regno + i))
2127 break;
2129 if (i == nwords)
2131 /* If we were going to use OUT as the reload reg
2132 and changed our mind, it means OUT is a dummy that
2133 dies here. So don't bother copying value to it. */
2134 if (for_real >= 0 && value == real_out)
2135 rld[for_real].out = 0;
2136 if (REG_P (real_in))
2137 value = real_in;
2138 else
2139 value = gen_rtx_REG (inmode, regno);
2144 return value;
2147 /* This page contains subroutines used mainly for determining
2148 whether the IN or an OUT of a reload can serve as the
2149 reload register. */
2151 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2154 earlyclobber_operand_p (rtx x)
2156 int i;
2158 for (i = 0; i < n_earlyclobbers; i++)
2159 if (reload_earlyclobbers[i] == x)
2160 return 1;
2162 return 0;
2165 /* Return 1 if expression X alters a hard reg in the range
2166 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2167 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2168 X should be the body of an instruction. */
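/* E.g. with X = (set (reg:SI 2) (const_int 0)) and assuming SImode needs a
   single hard register on the target, hard_reg_set_here_p (2, 3, X)
   returns 1 while hard_reg_set_here_p (3, 4, X) returns 0.  */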
2170 static int
2171 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2173 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2175 rtx op0 = SET_DEST (x);
2177 while (GET_CODE (op0) == SUBREG)
2178 op0 = SUBREG_REG (op0);
2179 if (REG_P (op0))
2181 unsigned int r = REGNO (op0);
2183 /* See if this reg overlaps range under consideration. */
2184 if (r < end_regno
2185 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2186 return 1;
2189 else if (GET_CODE (x) == PARALLEL)
2191 int i = XVECLEN (x, 0) - 1;
2193 for (; i >= 0; i--)
2194 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2195 return 1;
2198 return 0;
2201 /* Return 1 if ADDR is a valid memory address for mode MODE
2202 in address space AS, and check that each pseudo reg has the
2203 proper kind of hard reg. */
2206 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2207 rtx addr, addr_space_t as)
2209 #ifdef GO_IF_LEGITIMATE_ADDRESS
2210 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2211 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2212 return 0;
2214 win:
2215 return 1;
2216 #else
2217 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2218 #endif
2221 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2222 if they are the same hard reg, and has special hacks for
2223 autoincrement and autodecrement.
2224 This is specifically intended for find_reloads to use
2225 in determining whether two operands match.
2226 X is the operand whose number is the lower of the two.
2228 The value is 2 if Y contains a pre-increment that matches
2229 a non-incrementing address in X. */
2231 /* ??? To be completely correct, we should arrange to pass
2232 for X the output operand and for Y the input operand.
2233 For now, we assume that the output operand has the lower number
2234 because that is natural in (SET output (... input ...)). */
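/* Example of the return value 2 (the register number is arbitrary): with
   X = (mem:SI (reg:SI 1)) and Y = (mem:SI (pre_inc:SI (reg:SI 1))),
   the PRE_INC in Y matches the non-incrementing address in X, so the
   result is 2 instead of 1.  */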
2237 operands_match_p (rtx x, rtx y)
2239 int i;
2240 RTX_CODE code = GET_CODE (x);
2241 const char *fmt;
2242 int success_2;
2244 if (x == y)
2245 return 1;
2246 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2247 && (REG_P (y) || (GET_CODE (y) == SUBREG
2248 && REG_P (SUBREG_REG (y)))))
2250 int j;
2252 if (code == SUBREG)
2254 i = REGNO (SUBREG_REG (x));
2255 if (i >= FIRST_PSEUDO_REGISTER)
2256 goto slow;
2257 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2258 GET_MODE (SUBREG_REG (x)),
2259 SUBREG_BYTE (x),
2260 GET_MODE (x));
2262 else
2263 i = REGNO (x);
2265 if (GET_CODE (y) == SUBREG)
2267 j = REGNO (SUBREG_REG (y));
2268 if (j >= FIRST_PSEUDO_REGISTER)
2269 goto slow;
2270 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2271 GET_MODE (SUBREG_REG (y)),
2272 SUBREG_BYTE (y),
2273 GET_MODE (y));
2275 else
2276 j = REGNO (y);
2278 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2279 multiple hard register group of scalar integer registers, so that
2280 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2281 register. */
2282 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2283 && SCALAR_INT_MODE_P (GET_MODE (x))
2284 && i < FIRST_PSEUDO_REGISTER)
2285 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2286 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2287 && SCALAR_INT_MODE_P (GET_MODE (y))
2288 && j < FIRST_PSEUDO_REGISTER)
2289 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2291 return i == j;
2293 /* If two operands must match, because they are really a single
2294 operand of an assembler insn, then two postincrements are invalid
2295 because the assembler insn would increment only once.
2296 On the other hand, a postincrement matches ordinary indexing
2297 if the postincrement is the output operand. */
2298 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2299 return operands_match_p (XEXP (x, 0), y);
2300 /* Two preincrements are invalid
2301 because the assembler insn would increment only once.
2302 On the other hand, a preincrement matches ordinary indexing
2303 if the preincrement is the input operand.
2304 In this case, return 2, since some callers need to do special
2305 things when this happens. */
2306 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2307 || GET_CODE (y) == PRE_MODIFY)
2308 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2310 slow:
2312 /* Now we have disposed of all the cases in which different rtx codes
2313 can match. */
2314 if (code != GET_CODE (y))
2315 return 0;
2317 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2318 if (GET_MODE (x) != GET_MODE (y))
2319 return 0;
2321 /* MEMs referring to different address space are not equivalent. */
2322 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2323 return 0;
2325 switch (code)
2327 CASE_CONST_UNIQUE:
2328 return 0;
2330 case LABEL_REF:
2331 return XEXP (x, 0) == XEXP (y, 0);
2332 case SYMBOL_REF:
2333 return XSTR (x, 0) == XSTR (y, 0);
2335 default:
2336 break;
2339 /* Compare the elements. If any pair of corresponding elements
2340 fail to match, return 0 for the whole thing. */
2342 success_2 = 0;
2343 fmt = GET_RTX_FORMAT (code);
2344 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2346 int val, j;
2347 switch (fmt[i])
2349 case 'w':
2350 if (XWINT (x, i) != XWINT (y, i))
2351 return 0;
2352 break;
2354 case 'i':
2355 if (XINT (x, i) != XINT (y, i))
2356 return 0;
2357 break;
2359 case 'e':
2360 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2361 if (val == 0)
2362 return 0;
2363 /* If any subexpression returns 2,
2364 we should return 2 if we are successful. */
2365 if (val == 2)
2366 success_2 = 1;
2367 break;
2369 case '0':
2370 break;
2372 case 'E':
2373 if (XVECLEN (x, i) != XVECLEN (y, i))
2374 return 0;
2375 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2377 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2378 if (val == 0)
2379 return 0;
2380 if (val == 2)
2381 success_2 = 1;
2383 break;
2385 /* It is believed that rtx's at this level will never
2386 contain anything but integers and other rtx's,
2387 except for within LABEL_REFs and SYMBOL_REFs. */
2388 default:
2389 gcc_unreachable ();
2392 return 1 + success_2;
2395 /* Describe the range of registers or memory referenced by X.
2396 If X is a register, set REG_FLAG and put the first register
2397 number into START and the last plus one into END.
2398 If X is a memory reference, put a base address into BASE
2399 and a range of integer offsets into START and END.
2400 If X is pushing on the stack, we can assume it causes no trouble,
2401 so we set the SAFE field. */
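/* Example (assuming a 4-byte SImode): decomposing
   (mem:SI (plus:SI (reg:SI 100) (const_int 8))) gives base = (reg:SI 100),
   start = 8, end = 12; decomposing (reg:SI 100) instead sets reg_flag and
   puts the register range into start and end.  */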
2403 static struct decomposition
2404 decompose (rtx x)
2406 struct decomposition val;
2407 int all_const = 0;
2409 memset (&val, 0, sizeof (val));
2411 switch (GET_CODE (x))
2413 case MEM:
2415 rtx base = NULL_RTX, offset = 0;
2416 rtx addr = XEXP (x, 0);
2418 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2419 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2421 val.base = XEXP (addr, 0);
2422 val.start = -GET_MODE_SIZE (GET_MODE (x));
2423 val.end = GET_MODE_SIZE (GET_MODE (x));
2424 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2425 return val;
2428 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2430 if (GET_CODE (XEXP (addr, 1)) == PLUS
2431 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2432 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2434 val.base = XEXP (addr, 0);
2435 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2436 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2437 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2438 return val;
2442 if (GET_CODE (addr) == CONST)
2444 addr = XEXP (addr, 0);
2445 all_const = 1;
2447 if (GET_CODE (addr) == PLUS)
2449 if (CONSTANT_P (XEXP (addr, 0)))
2451 base = XEXP (addr, 1);
2452 offset = XEXP (addr, 0);
2454 else if (CONSTANT_P (XEXP (addr, 1)))
2456 base = XEXP (addr, 0);
2457 offset = XEXP (addr, 1);
2461 if (offset == 0)
2463 base = addr;
2464 offset = const0_rtx;
2466 if (GET_CODE (offset) == CONST)
2467 offset = XEXP (offset, 0);
2468 if (GET_CODE (offset) == PLUS)
2470 if (CONST_INT_P (XEXP (offset, 0)))
2472 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2473 offset = XEXP (offset, 0);
2475 else if (CONST_INT_P (XEXP (offset, 1)))
2477 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2478 offset = XEXP (offset, 1);
2480 else
2482 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2483 offset = const0_rtx;
2486 else if (!CONST_INT_P (offset))
2488 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2489 offset = const0_rtx;
2492 if (all_const && GET_CODE (base) == PLUS)
2493 base = gen_rtx_CONST (GET_MODE (base), base);
2495 gcc_assert (CONST_INT_P (offset));
2497 val.start = INTVAL (offset);
2498 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2499 val.base = base;
2501 break;
2503 case REG:
2504 val.reg_flag = 1;
2505 val.start = true_regnum (x);
2506 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2508 /* A pseudo with no hard reg. */
2509 val.start = REGNO (x);
2510 val.end = val.start + 1;
2512 else
2513 /* A hard reg. */
2514 val.end = end_hard_regno (GET_MODE (x), val.start);
2515 break;
2517 case SUBREG:
2518 if (!REG_P (SUBREG_REG (x)))
2519 /* This could be more precise, but it's good enough. */
2520 return decompose (SUBREG_REG (x));
2521 val.reg_flag = 1;
2522 val.start = true_regnum (x);
2523 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2524 return decompose (SUBREG_REG (x));
2525 else
2526 /* A hard reg. */
2527 val.end = val.start + subreg_nregs (x);
2528 break;
2530 case SCRATCH:
2531 /* This hasn't been assigned yet, so it can't conflict yet. */
2532 val.safe = 1;
2533 break;
2535 default:
2536 gcc_assert (CONSTANT_P (x));
2537 val.safe = 1;
2538 break;
2540 return val;
2543 /* Return 1 if altering Y will not modify the value of X.
2544 Y is also described by YDATA, which should be decompose (Y). */
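/* Example (assuming 4-byte SImode accesses): two stack slots at offsets 4
   and 8 from the same base register decompose to the ranges [4,8) and
   [8,12), which do not overlap, so immune_p returns 1.  */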
2546 static int
2547 immune_p (rtx x, rtx y, struct decomposition ydata)
2549 struct decomposition xdata;
2551 if (ydata.reg_flag)
2552 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2553 if (ydata.safe)
2554 return 1;
2556 gcc_assert (MEM_P (y));
2557 /* If Y is memory and X is not, Y can't affect X. */
2558 if (!MEM_P (x))
2559 return 1;
2561 xdata = decompose (x);
2563 if (! rtx_equal_p (xdata.base, ydata.base))
2565 /* If bases are distinct symbolic constants, there is no overlap. */
2566 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2567 return 1;
2568 /* Constants and stack slots never overlap. */
2569 if (CONSTANT_P (xdata.base)
2570 && (ydata.base == frame_pointer_rtx
2571 || ydata.base == hard_frame_pointer_rtx
2572 || ydata.base == stack_pointer_rtx))
2573 return 1;
2574 if (CONSTANT_P (ydata.base)
2575 && (xdata.base == frame_pointer_rtx
2576 || xdata.base == hard_frame_pointer_rtx
2577 || xdata.base == stack_pointer_rtx))
2578 return 1;
2579 /* If either base is variable, we don't know anything. */
2580 return 0;
2583 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2586 /* Similar, but calls decompose. */
2589 safe_from_earlyclobber (rtx op, rtx clobber)
2591 struct decomposition early_data;
2593 early_data = decompose (clobber);
2594 return immune_p (op, clobber, early_data);
2597 /* Main entry point of this file: search the body of INSN
2598 for values that need reloading and record them with push_reload.
2599 REPLACE nonzero means record also where the values occur
2600 so that subst_reloads can be used.
2602 IND_LEVELS says how many levels of indirection are supported by this
2603 machine; a value of zero means that a memory reference is not a valid
2604 memory address.
2606 LIVE_KNOWN says we have valid information about which hard
2607 regs are live at each point in the program; this is true when
2608 we are called from global_alloc but false when stupid register
2609 allocation has been done.
2611 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2612 which is nonnegative if the reg has been commandeered for reloading into.
2613 It is copied into STATIC_RELOAD_REG_P and referenced from there
2614 by various subroutines.
2616 Return TRUE if some operands need to be changed, because of swapping
2617 commutative operands, reg_equiv_address substitution, or whatever. */
2620 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2621 short *reload_reg_p)
2623 int insn_code_number;
2624 int i, j;
2625 int noperands;
2626 /* These start out as the constraints for the insn
2627 and they are chewed up as we consider alternatives. */
2628 const char *constraints[MAX_RECOG_OPERANDS];
2629 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2630 a register. */
2631 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2632 char pref_or_nothing[MAX_RECOG_OPERANDS];
2633 /* Nonzero for a MEM operand whose entire address needs a reload.
2634 May be -1 to indicate the entire address may or may not need a reload. */
2635 int address_reloaded[MAX_RECOG_OPERANDS];
2636 /* Nonzero for an address operand that needs to be completely reloaded.
2637 May be -1 to indicate the entire operand may or may not need a reload. */
2638 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2639 /* Value of enum reload_type to use for operand. */
2640 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2641 /* Value of enum reload_type to use within address of operand. */
2642 enum reload_type address_type[MAX_RECOG_OPERANDS];
2643 /* Save the usage of each operand. */
2644 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2645 int no_input_reloads = 0, no_output_reloads = 0;
2646 int n_alternatives;
2647 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2648 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2649 char this_alternative_win[MAX_RECOG_OPERANDS];
2650 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2651 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2652 int this_alternative_matches[MAX_RECOG_OPERANDS];
2653 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2654 int this_alternative_number;
2655 int goal_alternative_number = 0;
2656 int operand_reloadnum[MAX_RECOG_OPERANDS];
2657 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2658 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2659 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2660 char goal_alternative_win[MAX_RECOG_OPERANDS];
2661 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2662 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2663 int goal_alternative_swapped;
2664 int best;
2665 int commutative;
2666 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2667 rtx substed_operand[MAX_RECOG_OPERANDS];
2668 rtx body = PATTERN (insn);
2669 rtx set = single_set (insn);
2670 int goal_earlyclobber = 0, this_earlyclobber;
2671 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2672 int retval = 0;
2674 this_insn = insn;
2675 n_reloads = 0;
2676 n_replacements = 0;
2677 n_earlyclobbers = 0;
2678 replace_reloads = replace;
2679 hard_regs_live_known = live_known;
2680 static_reload_reg_p = reload_reg_p;
2682 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2683 neither are insns that SET cc0. Insns that use CC0 are not allowed
2684 to have any input reloads. */
2685 if (JUMP_P (insn) || CALL_P (insn))
2686 no_output_reloads = 1;
2688 #ifdef HAVE_cc0
2689 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2690 no_input_reloads = 1;
2691 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2692 no_output_reloads = 1;
2693 #endif
2695 #ifdef SECONDARY_MEMORY_NEEDED
2696 /* The eliminated forms of any secondary memory locations are per-insn, so
2697 clear them out here. */
2699 if (secondary_memlocs_elim_used)
2701 memset (secondary_memlocs_elim, 0,
2702 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2703 secondary_memlocs_elim_used = 0;
2705 #endif
2707 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2708 is cheap to move between them. If it is not, there may not be an insn
2709 to do the copy, so we may need a reload. */
2710 if (GET_CODE (body) == SET
2711 && REG_P (SET_DEST (body))
2712 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2713 && REG_P (SET_SRC (body))
2714 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2715 && register_move_cost (GET_MODE (SET_SRC (body)),
2716 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2717 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2718 return 0;
2720 extract_insn (insn);
2722 noperands = reload_n_operands = recog_data.n_operands;
2723 n_alternatives = recog_data.n_alternatives;
2725 /* Just return "no reloads" if insn has no operands with constraints. */
2726 if (noperands == 0 || n_alternatives == 0)
2727 return 0;
2729 insn_code_number = INSN_CODE (insn);
2730 this_insn_is_asm = insn_code_number < 0;
2732 memcpy (operand_mode, recog_data.operand_mode,
2733 noperands * sizeof (enum machine_mode));
2734 memcpy (constraints, recog_data.constraints,
2735 noperands * sizeof (const char *));
2737 commutative = -1;
2739 /* If we will need to know, later, whether some pair of operands
2740 are the same, we must compare them now and save the result.
2741 Reloading the base and index registers will clobber them
2742 and afterward they will fail to match. */
2744 for (i = 0; i < noperands; i++)
2746 const char *p;
2747 int c;
2748 char *end;
2750 substed_operand[i] = recog_data.operand[i];
2751 p = constraints[i];
2753 modified[i] = RELOAD_READ;
2755 /* Scan this operand's constraint to see if it is an output operand,
2756 an in-out operand, is commutative, or should match another. */
2758 while ((c = *p))
2760 p += CONSTRAINT_LEN (c, p);
2761 switch (c)
2763 case '=':
2764 modified[i] = RELOAD_WRITE;
2765 break;
2766 case '+':
2767 modified[i] = RELOAD_READ_WRITE;
2768 break;
2769 case '%':
2771 /* The last operand should not be marked commutative. */
2772 gcc_assert (i != noperands - 1);
2774 /* We currently only support one commutative pair of
2775 operands. Some existing asm code currently uses more
2776 than one pair. Previously, that would usually work,
2777 but sometimes it would crash the compiler. We
2778 continue supporting that case as well as we can by
2779 silently ignoring all but the first pair. In the
2780 future we may handle it correctly. */
2781 if (commutative < 0)
2782 commutative = i;
2783 else
2784 gcc_assert (this_insn_is_asm);
2786 break;
2787 /* Use of ISDIGIT is tempting here, but it may get expensive because
2788 of locale support we don't want. */
2789 case '0': case '1': case '2': case '3': case '4':
2790 case '5': case '6': case '7': case '8': case '9':
2792 c = strtoul (p - 1, &end, 10);
2793 p = end;
2795 operands_match[c][i]
2796 = operands_match_p (recog_data.operand[c],
2797 recog_data.operand[i]);
2799 /* An operand may not match itself. */
2800 gcc_assert (c != i);
2802 /* If C can be commuted with C+1, and C might need to match I,
2803 then C+1 might also need to match I. */
2804 if (commutative >= 0)
2806 if (c == commutative || c == commutative + 1)
2808 int other = c + (c == commutative ? 1 : -1);
2809 operands_match[other][i]
2810 = operands_match_p (recog_data.operand[other],
2811 recog_data.operand[i]);
2813 if (i == commutative || i == commutative + 1)
2815 int other = i + (i == commutative ? 1 : -1);
2816 operands_match[c][other]
2817 = operands_match_p (recog_data.operand[c],
2818 recog_data.operand[other]);
2820 /* Note that C is supposed to be less than I.
2821 No need to consider altering both C and I because in
2822 that case we would alter one into the other. */
2829 /* Examine each operand that is a memory reference or memory address
2830 and reload parts of the addresses into index registers.
2831 Also here any references to pseudo regs that didn't get hard regs
2832 but are equivalent to constants get replaced in the insn itself
2833 with those constants. Nobody will ever see them again.
2835 Finally, set up the preferred classes of each operand. */
2837 for (i = 0; i < noperands; i++)
2839 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2841 address_reloaded[i] = 0;
2842 address_operand_reloaded[i] = 0;
2843 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2844 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2845 : RELOAD_OTHER);
2846 address_type[i]
2847 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2848 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2849 : RELOAD_OTHER);
2851 if (*constraints[i] == 0)
2852 /* Ignore things like match_operator operands. */
2854 else if (constraints[i][0] == 'p'
2855 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2857 address_operand_reloaded[i]
2858 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2859 recog_data.operand[i],
2860 recog_data.operand_loc[i],
2861 i, operand_type[i], ind_levels, insn);
2863 /* If we now have a simple operand where we used to have a
2864 PLUS or MULT, re-recognize and try again. */
2865 if ((OBJECT_P (*recog_data.operand_loc[i])
2866 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2867 && (GET_CODE (recog_data.operand[i]) == MULT
2868 || GET_CODE (recog_data.operand[i]) == PLUS))
2870 INSN_CODE (insn) = -1;
2871 retval = find_reloads (insn, replace, ind_levels, live_known,
2872 reload_reg_p);
2873 return retval;
2876 recog_data.operand[i] = *recog_data.operand_loc[i];
2877 substed_operand[i] = recog_data.operand[i];
2879 /* Address operands are reloaded in their existing mode,
2880 no matter what is specified in the machine description. */
2881 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2883 /* If the address is a single CONST_INT, pick the address mode
2884 instead; otherwise we will later not know in which mode
2885 the reload should be performed. */
2886 if (operand_mode[i] == VOIDmode)
2887 operand_mode[i] = Pmode;
2890 else if (code == MEM)
2892 address_reloaded[i]
2893 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2894 recog_data.operand_loc[i],
2895 XEXP (recog_data.operand[i], 0),
2896 &XEXP (recog_data.operand[i], 0),
2897 i, address_type[i], ind_levels, insn);
2898 recog_data.operand[i] = *recog_data.operand_loc[i];
2899 substed_operand[i] = recog_data.operand[i];
2901 else if (code == SUBREG)
2903 rtx reg = SUBREG_REG (recog_data.operand[i]);
2904 rtx op
2905 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2906 ind_levels,
2907 set != 0
2908 && &SET_DEST (set) == recog_data.operand_loc[i],
2909 insn,
2910 &address_reloaded[i]);
2912 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2913 that didn't get a hard register, emit a USE with a REG_EQUAL
2914 note in front so that we might inherit a previous, possibly
2915 wider reload. */
2917 if (replace
2918 && MEM_P (op)
2919 && REG_P (reg)
2920 && (GET_MODE_SIZE (GET_MODE (reg))
2921 >= GET_MODE_SIZE (GET_MODE (op)))
2922 && reg_equiv_constant (REGNO (reg)) == 0)
2923 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2924 insn),
2925 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2927 substed_operand[i] = recog_data.operand[i] = op;
2929 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2930 /* We can get a PLUS as an "operand" as a result of register
2931 elimination. See eliminate_regs and gen_reload. We handle
2932 a unary operator by reloading the operand. */
2933 substed_operand[i] = recog_data.operand[i]
2934 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2935 ind_levels, 0, insn,
2936 &address_reloaded[i]);
2937 else if (code == REG)
2939 /* This is equivalent to calling find_reloads_toplev.
2940 The code is duplicated for speed.
2941 When we find a pseudo always equivalent to a constant,
2942 we replace it by the constant. We must be sure, however,
2943 that we don't try to replace it in the insn in which it
2944 is being set. */
2945 int regno = REGNO (recog_data.operand[i]);
2946 if (reg_equiv_constant (regno) != 0
2947 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2949 /* Record the existing mode so that the check whether constants are
2950 allowed will work when operand_mode isn't specified. */
2952 if (operand_mode[i] == VOIDmode)
2953 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2955 substed_operand[i] = recog_data.operand[i]
2956 = reg_equiv_constant (regno);
2958 if (reg_equiv_memory_loc (regno) != 0
2959 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2960 /* We need not give a valid is_set_dest argument since the case
2961 of a constant equivalence was checked above. */
2962 substed_operand[i] = recog_data.operand[i]
2963 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2964 ind_levels, 0, insn,
2965 &address_reloaded[i]);
2967 /* If the operand is still a register (we didn't replace it with an
2968 equivalent), get the preferred class to reload it into. */
2969 code = GET_CODE (recog_data.operand[i]);
2970 preferred_class[i]
2971 = ((code == REG && REGNO (recog_data.operand[i])
2972 >= FIRST_PSEUDO_REGISTER)
2973 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2974 : NO_REGS);
2975 pref_or_nothing[i]
2976 = (code == REG
2977 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2978 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2981 /* If this is simply a copy from operand 1 to operand 0, merge the
2982 preferred classes for the operands. */
2983 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2984 && recog_data.operand[1] == SET_SRC (set))
2986 preferred_class[0] = preferred_class[1]
2987 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2988 pref_or_nothing[0] |= pref_or_nothing[1];
2989 pref_or_nothing[1] |= pref_or_nothing[0];
2992 /* Now see what we need for pseudo-regs that didn't get hard regs
2993 or got the wrong kind of hard reg. For this, we must consider
2994 all the operands together against the register constraints. */
2996 best = MAX_RECOG_OPERANDS * 2 + 600;
2998 goal_alternative_swapped = 0;
3000 /* The constraints are made of several alternatives.
3001 Each operand's constraint looks like foo,bar,... with commas
3002 separating the alternatives. The first alternatives for all
3003 operands go together, the second alternatives go together, etc.
3005 First loop over alternatives. */
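/* For example, the constraint strings "=r,m" for operand 0 and "rI,r" for
   operand 1 (standard constraint letters; 'I' is machine-specific) describe
   two alternatives: register output with register-or-immediate input, or
   memory output with register input.  */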
3007 for (this_alternative_number = 0;
3008 this_alternative_number < n_alternatives;
3009 this_alternative_number++)
3011 int swapped;
3013 if (!recog_data.alternative_enabled_p[this_alternative_number])
3015 int i;
3017 for (i = 0; i < recog_data.n_operands; i++)
3018 constraints[i] = skip_alternative (constraints[i]);
3020 continue;
3023 /* If insn is commutative (it's safe to exchange a certain pair
3024 of operands) then we need to try each alternative twice, the
3025 second time matching those two operands as if we had
3026 exchanged them. To do this, really exchange them in
3027 operands. */
3028 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3030 /* Loop over operands for one constraint alternative. */
3031 /* LOSERS counts those that don't fit this alternative
3032 and would require loading. */
3033 int losers = 0;
3034 /* BAD is set to 1 if some operand can't fit this alternative
3035 even after reloading. */
3036 int bad = 0;
3037 /* REJECT is a count of how undesirable this alternative says it is
3038 if any reloading is required. If the alternative matches exactly
3039 then REJECT is ignored, but otherwise it gets this much
3040 counted against it in addition to the reloading needed. Each
3041 ? counts three times here since we want the disparaging caused by
3042 a bad register class to only count 1/3 as much. */
3043 int reject = 0;
3045 if (swapped)
3047 enum reg_class tclass;
3048 int t;
3050 recog_data.operand[commutative] = substed_operand[commutative + 1];
3051 recog_data.operand[commutative + 1] = substed_operand[commutative];
3052 /* Swap the duplicates too. */
3053 for (i = 0; i < recog_data.n_dups; i++)
3054 if (recog_data.dup_num[i] == commutative
3055 || recog_data.dup_num[i] == commutative + 1)
3056 *recog_data.dup_loc[i]
3057 = recog_data.operand[(int) recog_data.dup_num[i]];
3059 tclass = preferred_class[commutative];
3060 preferred_class[commutative] = preferred_class[commutative + 1];
3061 preferred_class[commutative + 1] = tclass;
3063 t = pref_or_nothing[commutative];
3064 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3065 pref_or_nothing[commutative + 1] = t;
3067 t = address_reloaded[commutative];
3068 address_reloaded[commutative] = address_reloaded[commutative + 1];
3069 address_reloaded[commutative + 1] = t;
3072 this_earlyclobber = 0;
3074 for (i = 0; i < noperands; i++)
3076 const char *p = constraints[i];
3077 char *end;
3078 int len;
3079 int win = 0;
3080 int did_match = 0;
3081 /* 0 => this operand can be reloaded somehow for this alternative. */
3082 int badop = 1;
3083 /* 0 => this operand can be reloaded if the alternative allows regs. */
3084 int winreg = 0;
3085 int c;
3086 int m;
3087 rtx operand = recog_data.operand[i];
3088 int offset = 0;
3089 /* Nonzero means this is a MEM that must be reloaded into a reg
3090 regardless of what the constraint says. */
3091 int force_reload = 0;
3092 int offmemok = 0;
3093 /* Nonzero if a constant forced into memory would be OK for this
3094 operand. */
3095 int constmemok = 0;
3096 int earlyclobber = 0;
3098 /* If the predicate accepts a unary operator, it means that
3099 we need to reload the operand, but do not do this for
3100 match_operator and friends. */
3101 if (UNARY_P (operand) && *p != 0)
3102 operand = XEXP (operand, 0);
3104 /* If the operand is a SUBREG, extract
3105 the REG or MEM (or maybe even a constant) within.
3106 (Constants can occur as a result of reg_equiv_constant.) */
3108 while (GET_CODE (operand) == SUBREG)
3110 /* Offset only matters when operand is a REG and
3111 it is a hard reg. This is because it is passed
3112 to reg_fits_class_p if it is a REG and all pseudos
3113 return 0 from that function. */
3114 if (REG_P (SUBREG_REG (operand))
3115 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3117 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3118 GET_MODE (SUBREG_REG (operand)),
3119 SUBREG_BYTE (operand),
3120 GET_MODE (operand)) < 0)
3121 force_reload = 1;
3122 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3123 GET_MODE (SUBREG_REG (operand)),
3124 SUBREG_BYTE (operand),
3125 GET_MODE (operand));
3127 operand = SUBREG_REG (operand);
3128 /* Force reload if this is a constant or PLUS or if there may
3129 be a problem accessing OPERAND in the outer mode. */
3130 if (CONSTANT_P (operand)
3131 || GET_CODE (operand) == PLUS
3132 /* We must force a reload of paradoxical SUBREGs
3133 of a MEM because the alignment of the inner value
3134 may not be enough to do the outer reference. On
3135 big-endian machines, it may also reference outside
3136 the object.
3138 On machines that extend byte operations and we have a
3139 SUBREG where both the inner and outer modes are no wider
3140 than a word and the inner mode is narrower, is integral,
3141 and gets extended when loaded from memory, combine.c has
3142 made assumptions about the behavior of the machine in such
3143 register accesses. If the data is, in fact, in memory we
3144 must always load using the size assumed to be in the
3145 register and let the insn do the different-sized
3146 accesses.
3148 This is doubly true if WORD_REGISTER_OPERATIONS. In
3149 this case eliminate_regs has left non-paradoxical
3150 subregs for push_reload to see. Make sure it does
3151 by forcing the reload.
3153 ??? When is it right at this stage to have a subreg
3154 of a mem that is _not_ to be handled specially? IMO
3155 those should have been reduced to just a mem. */
3156 || ((MEM_P (operand)
3157 || (REG_P (operand)
3158 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3159 #ifndef WORD_REGISTER_OPERATIONS
3160 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3161 < BIGGEST_ALIGNMENT)
3162 && (GET_MODE_SIZE (operand_mode[i])
3163 > GET_MODE_SIZE (GET_MODE (operand))))
3164 || BYTES_BIG_ENDIAN
3165 #ifdef LOAD_EXTEND_OP
3166 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3167 && (GET_MODE_SIZE (GET_MODE (operand))
3168 <= UNITS_PER_WORD)
3169 && (GET_MODE_SIZE (operand_mode[i])
3170 > GET_MODE_SIZE (GET_MODE (operand)))
3171 && INTEGRAL_MODE_P (GET_MODE (operand))
3172 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3173 #endif
3175 #endif
3178 force_reload = 1;
3181 this_alternative[i] = NO_REGS;
3182 this_alternative_win[i] = 0;
3183 this_alternative_match_win[i] = 0;
3184 this_alternative_offmemok[i] = 0;
3185 this_alternative_earlyclobber[i] = 0;
3186 this_alternative_matches[i] = -1;
3188 /* An empty constraint or empty alternative
3189 allows anything which matched the pattern. */
3190 if (*p == 0 || *p == ',')
3191 win = 1, badop = 0;
3193 /* Scan this alternative's specs for this operand;
3194 set WIN if the operand fits any letter in this alternative.
3195 Otherwise, clear BADOP if this operand could
3196 fit some letter after reloads,
3197 or set WINREG if this operand could fit after reloads
3198 provided the constraint allows some registers. */
3201 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3203 case '\0':
3204 len = 0;
3205 break;
3206 case ',':
3207 c = '\0';
3208 break;
3210 case '=': case '+': case '*':
3211 break;
3213 case '%':
3214 /* We only support one commutative marker, the first
3215 one. We already set commutative above. */
3216 break;
3218 case '?':
3219 reject += 6;
3220 break;
3222 case '!':
3223 reject = 600;
3224 break;
3226 case '#':
3227 /* Ignore rest of this alternative as far as
3228 reloading is concerned. */
3229 do
3230 p++;
3231 while (*p && *p != ',');
3232 len = 0;
3233 break;
3235 case '0': case '1': case '2': case '3': case '4':
3236 case '5': case '6': case '7': case '8': case '9':
3237 m = strtoul (p, &end, 10);
3238 p = end;
3239 len = 0;
3241 this_alternative_matches[i] = m;
3242 /* We are supposed to match a previous operand.
3243 If we do, we win if that one did.
3244 If we do not, count both of the operands as losers.
3245 (This is too conservative, since most of the time
3246 only a single reload insn will be needed to make
3247 the two operands win. As a result, this alternative
3248 may be rejected when it is actually desirable.) */
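/* A concrete example of the index juggling below: with commutative == 1
   and SWAPPED nonzero, an index n in {1, 2} maps to
   2 * commutative + 1 - n, so 1 becomes 2 and 2 becomes 1.  This
   translates the swapped operand numbers back to the positions for
   which operands_match was originally computed, instead of recomputing
   the table.  */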
3249 if ((swapped && (m != commutative || i != commutative + 1))
3250 /* If we are matching as if two operands were swapped,
3251 also pretend that operands_match had been computed
3252 with swapped.
3253 But if I is the second of those and M is the first,
3254 don't exchange them, because operands_match is valid
3255 only on one side of its diagonal. */
3256 ? (operands_match
3257 [(m == commutative || m == commutative + 1)
3258 ? 2 * commutative + 1 - m : m]
3259 [(i == commutative || i == commutative + 1)
3260 ? 2 * commutative + 1 - i : i])
3261 : operands_match[m][i])
3263 /* If we are matching a non-offsettable address where an
3264 offsettable address was expected, then we must reject
3265 this combination, because we can't reload it. */
3266 if (this_alternative_offmemok[m]
3267 && MEM_P (recog_data.operand[m])
3268 && this_alternative[m] == NO_REGS
3269 && ! this_alternative_win[m])
3270 bad = 1;
3272 did_match = this_alternative_win[m];
3274 else
3276 /* Operands don't match. */
3277 rtx value;
3278 int loc1, loc2;
3279 /* Retroactively mark the operand we had to match
3280 as a loser, if it wasn't already. */
3281 if (this_alternative_win[m])
3282 losers++;
3283 this_alternative_win[m] = 0;
3284 if (this_alternative[m] == NO_REGS)
3285 bad = 1;
3286 /* But count the pair only once in the total badness of
3287 this alternative, if the pair can be a dummy reload.
3288 The pointers in operand_loc are not swapped; swap
3289 them by hand if necessary. */
3290 if (swapped && i == commutative)
3291 loc1 = commutative + 1;
3292 else if (swapped && i == commutative + 1)
3293 loc1 = commutative;
3294 else
3295 loc1 = i;
3296 if (swapped && m == commutative)
3297 loc2 = commutative + 1;
3298 else if (swapped && m == commutative + 1)
3299 loc2 = commutative;
3300 else
3301 loc2 = m;
3302 value
3303 = find_dummy_reload (recog_data.operand[i],
3304 recog_data.operand[m],
3305 recog_data.operand_loc[loc1],
3306 recog_data.operand_loc[loc2],
3307 operand_mode[i], operand_mode[m],
3308 this_alternative[m], -1,
3309 this_alternative_earlyclobber[m]);
3311 if (value != 0)
3312 losers--;
3314 /* This can be fixed with reloads if the operand
3315 we are supposed to match can be fixed with reloads. */
3316 badop = 0;
3317 this_alternative[i] = this_alternative[m];
3319 /* If we have to reload this operand and some previous
3320 operand also had to match the same thing as this
3321 operand, we don't know how to do that. So reject this
3322 alternative. */
3323 if (! did_match || force_reload)
3324 for (j = 0; j < i; j++)
3325 if (this_alternative_matches[j]
3326 == this_alternative_matches[i])
3327 badop = 1;
3328 break;
3330 case 'p':
3331 /* All necessary reloads for an address_operand
3332 were handled in find_reloads_address. */
3333 this_alternative[i]
3334 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3335 ADDRESS, SCRATCH);
3336 win = 1;
3337 badop = 0;
3338 break;
3340 case TARGET_MEM_CONSTRAINT:
3341 if (force_reload)
3342 break;
3343 if (MEM_P (operand)
3344 || (REG_P (operand)
3345 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3346 && reg_renumber[REGNO (operand)] < 0))
3347 win = 1;
3348 if (CONST_POOL_OK_P (operand_mode[i], operand))
3349 badop = 0;
3350 constmemok = 1;
3351 break;
3353 case '<':
3354 if (MEM_P (operand)
3355 && ! address_reloaded[i]
3356 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3357 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3358 win = 1;
3359 break;
3361 case '>':
3362 if (MEM_P (operand)
3363 && ! address_reloaded[i]
3364 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3365 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3366 win = 1;
3367 break;
3369 /* Memory operand whose address is not offsettable. */
3370 case 'V':
3371 if (force_reload)
3372 break;
3373 if (MEM_P (operand)
3374 && ! (ind_levels ? offsettable_memref_p (operand)
3375 : offsettable_nonstrict_memref_p (operand))
3376 /* Certain mem addresses will become offsettable
3377 after they themselves are reloaded. This is important;
3378 we don't want our own handling of unoffsettables
3379 to override the handling of reg_equiv_address. */
3380 && !(REG_P (XEXP (operand, 0))
3381 && (ind_levels == 0
3382 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3383 win = 1;
3384 break;
3386 /* Memory operand whose address is offsettable. */
3387 case 'o':
3388 if (force_reload)
3389 break;
3390 if ((MEM_P (operand)
3391 /* If IND_LEVELS, find_reloads_address won't reload a
3392 pseudo that didn't get a hard reg, so we have to
3393 reject that case. */
3394 && ((ind_levels ? offsettable_memref_p (operand)
3395 : offsettable_nonstrict_memref_p (operand))
3396 /* A reloaded address is offsettable because it is now
3397 just a simple register indirect. */
3398 || address_reloaded[i] == 1))
3399 || (REG_P (operand)
3400 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3401 && reg_renumber[REGNO (operand)] < 0
3402 /* If reg_equiv_address is nonzero, we will be
3403 loading it into a register; hence it will be
3404 offsettable, but we cannot say that reg_equiv_mem
3405 is offsettable without checking. */
3406 && ((reg_equiv_mem (REGNO (operand)) != 0
3407 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3408 || (reg_equiv_address (REGNO (operand)) != 0))))
3409 win = 1;
3410 if (CONST_POOL_OK_P (operand_mode[i], operand)
3411 || MEM_P (operand))
3412 badop = 0;
3413 constmemok = 1;
3414 offmemok = 1;
3415 break;
3417 case '&':
3418 /* Output operand that is stored before the need for the
3419 input operands (and their index registers) is over. */
3420 earlyclobber = 1, this_earlyclobber = 1;
3421 break;
3423 case 'E':
3424 case 'F':
3425 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3426 || (GET_CODE (operand) == CONST_VECTOR
3427 && (GET_MODE_CLASS (GET_MODE (operand))
3428 == MODE_VECTOR_FLOAT)))
3429 win = 1;
3430 break;
3432 case 'G':
3433 case 'H':
3434 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3435 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3436 win = 1;
3437 break;
3439 case 's':
3440 if (CONST_INT_P (operand) || CONST_DOUBLE_AS_INT_P (operand))
3441 break;
3442 case 'i':
3443 if (CONSTANT_P (operand)
3444 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3445 win = 1;
3446 break;
3448 case 'n':
3449 if (CONST_INT_P (operand) || CONST_DOUBLE_AS_INT_P (operand))
3450 win = 1;
3451 break;
3453 case 'I':
3454 case 'J':
3455 case 'K':
3456 case 'L':
3457 case 'M':
3458 case 'N':
3459 case 'O':
3460 case 'P':
3461 if (CONST_INT_P (operand)
3462 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3463 win = 1;
3464 break;
3466 case 'X':
3467 force_reload = 0;
3468 win = 1;
3469 break;
3471 case 'g':
3472 if (! force_reload
3473 /* A PLUS is never a valid operand, but reload can make
3474 it from a register when eliminating registers. */
3475 && GET_CODE (operand) != PLUS
3476 /* A SCRATCH is not a valid operand. */
3477 && GET_CODE (operand) != SCRATCH
3478 && (! CONSTANT_P (operand)
3479 || ! flag_pic
3480 || LEGITIMATE_PIC_OPERAND_P (operand))
3481 && (GENERAL_REGS == ALL_REGS
3482 || !REG_P (operand)
3483 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3484 && reg_renumber[REGNO (operand)] < 0)))
3485 win = 1;
3486 /* Drop through into 'r' case. */
3488 case 'r':
3489 this_alternative[i]
3490 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3491 goto reg;
3493 default:
3494 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3496 #ifdef EXTRA_CONSTRAINT_STR
3497 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3499 if (force_reload)
3500 break;
3501 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3502 win = 1;
3503 /* If the address was already reloaded,
3504 we win as well. */
3505 else if (MEM_P (operand)
3506 && address_reloaded[i] == 1)
3507 win = 1;
3508 /* Likewise if the address will be reloaded because
3509 reg_equiv_address is nonzero. For reg_equiv_mem
3510 we have to check. */
3511 else if (REG_P (operand)
3512 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3513 && reg_renumber[REGNO (operand)] < 0
3514 && ((reg_equiv_mem (REGNO (operand)) != 0
3515 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3516 || (reg_equiv_address (REGNO (operand)) != 0)))
3517 win = 1;
3519 /* If we didn't already win, we can reload
3520 constants via force_const_mem, and other
3521 MEMs by reloading the address like for 'o'. */
3522 if (CONST_POOL_OK_P (operand_mode[i], operand)
3523 || MEM_P (operand))
3524 badop = 0;
3525 constmemok = 1;
3526 offmemok = 1;
3527 break;
3529 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3531 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3532 win = 1;
3534 /* If we didn't already win, we can reload
3535 the address into a base register. */
3536 this_alternative[i]
3537 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3538 ADDRESS, SCRATCH);
3539 badop = 0;
3540 break;
3543 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3544 win = 1;
3545 #endif
3546 break;
3549 this_alternative[i]
3550 = (reg_class_subunion
3551 [this_alternative[i]]
3552 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3553 reg:
3554 if (GET_MODE (operand) == BLKmode)
3555 break;
3556 winreg = 1;
3557 if (REG_P (operand)
3558 && reg_fits_class_p (operand, this_alternative[i],
3559 offset, GET_MODE (recog_data.operand[i])))
3560 win = 1;
3561 break;
3563 while ((p += len), c);
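/* The comma expression above advances P past the constraint letter just
   handled (LEN was set by CONSTRAINT_LEN) and stops once C is '\0',
   which the cases above arrange both at the end of the string and at a
   ',' separating alternatives.  */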
3565 if (swapped == (commutative >= 0 ? 1 : 0))
3566 constraints[i] = p;
3568 /* If this operand could be handled with a reg,
3569 and some reg is allowed, then this operand can be handled. */
3570 if (winreg && this_alternative[i] != NO_REGS
3571 && (win || !class_only_fixed_regs[this_alternative[i]]))
3572 badop = 0;
3574 /* Record which operands fit this alternative. */
3575 this_alternative_earlyclobber[i] = earlyclobber;
3576 if (win && ! force_reload)
3577 this_alternative_win[i] = 1;
3578 else if (did_match && ! force_reload)
3579 this_alternative_match_win[i] = 1;
3580 else
3582 int const_to_mem = 0;
3584 this_alternative_offmemok[i] = offmemok;
3585 losers++;
3586 if (badop)
3587 bad = 1;
3588 /* Alternative loses if it has no regs for a reg operand. */
3589 if (REG_P (operand)
3590 && this_alternative[i] == NO_REGS
3591 && this_alternative_matches[i] < 0)
3592 bad = 1;
3594 /* If this is a constant that is reloaded into the desired
3595 class by copying it to memory first, count that as another
3596 reload. This is consistent with other code and is
3597 required to avoid choosing another alternative when
3598 the constant is moved into memory by this function on
3599 an early reload pass. Note that the test here is
3600 precisely the same as in the code below that calls
3601 force_const_mem. */
3602 if (CONST_POOL_OK_P (operand_mode[i], operand)
3603 && ((targetm.preferred_reload_class (operand,
3604 this_alternative[i])
3605 == NO_REGS)
3606 || no_input_reloads))
3608 const_to_mem = 1;
3609 if (this_alternative[i] != NO_REGS)
3610 losers++;
3613 /* Alternative loses if it requires a type of reload not
3614 permitted for this insn. We can always reload SCRATCH
3615 and objects with a REG_UNUSED note. */
3616 if (GET_CODE (operand) != SCRATCH
3617 && modified[i] != RELOAD_READ && no_output_reloads
3618 && ! find_reg_note (insn, REG_UNUSED, operand))
3619 bad = 1;
3620 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3621 && ! const_to_mem)
3622 bad = 1;
3624 /* If we can't reload this value at all, reject this
3625 alternative. Note that we could also lose due to
3626 LIMIT_RELOAD_CLASS, but we don't check that
3627 here. */
3629 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3631 if (targetm.preferred_reload_class (operand,
3632 this_alternative[i])
3633 == NO_REGS)
3634 reject = 600;
3636 if (operand_type[i] == RELOAD_FOR_OUTPUT
3637 && (targetm.preferred_output_reload_class (operand,
3638 this_alternative[i])
3639 == NO_REGS))
3640 reject = 600;
3643 /* We prefer to reload pseudos over reloading other things,
3644 since such reloads may be able to be eliminated later.
3645 If we are reloading a SCRATCH, we won't be generating any
3646 insns, just using a register, so it is also preferred.
3647 So bump REJECT in other cases. Don't do this in the
3648 case where we are forcing a constant into memory and
3649 it will then win, since we don't want a different
3650 alternative to match in that case. */
3651 if (! (REG_P (operand)
3652 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3653 && GET_CODE (operand) != SCRATCH
3654 && ! (const_to_mem && constmemok))
3655 reject += 2;
3657 /* Input reloads can be inherited more often than output
3658 reloads can be removed, so penalize output reloads. */
3659 if (operand_type[i] != RELOAD_FOR_INPUT
3660 && GET_CODE (operand) != SCRATCH)
3661 reject++;
3664 /* If this operand is a pseudo register that didn't get
3665 a hard reg and this alternative accepts some
3666 register, see if the class that we want is a subset
3667 of the preferred class for this register. If not,
3668 but it intersects that class, use the preferred class
3669 instead. If it does not intersect the preferred
3670 class, show that usage of this alternative should be
3671 discouraged; it will be discouraged more still if the
3672 register is `preferred or nothing'. We do this
3673 because it increases the chance of reusing our spill
3674 register in a later insn and avoiding a pair of
3675 memory stores and loads.
3677 Don't bother with this if this alternative will
3678 accept this operand.
3680 Don't do this for a multiword operand, since it is
3681 only a small win and has the risk of requiring more
3682 spill registers, which could cause a large loss.
3684 Don't do this if the preferred class has only one
3685 register because we might otherwise exhaust the
3686 class. */
3688 if (! win && ! did_match
3689 && this_alternative[i] != NO_REGS
3690 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3691 && reg_class_size [(int) preferred_class[i]] > 0
3692 && ! small_register_class_p (preferred_class[i]))
3694 if (! reg_class_subset_p (this_alternative[i],
3695 preferred_class[i]))
3697 /* Since we don't have a way of forming the intersection,
3698 we just do something special if the preferred class
3699 is a subset of the class we have; that's the most
3700 common case anyway. */
3701 if (reg_class_subset_p (preferred_class[i],
3702 this_alternative[i]))
3703 this_alternative[i] = preferred_class[i];
3704 else
3705 reject += (2 + 2 * pref_or_nothing[i]);
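/* That is, the mismatch costs 2, plus another 2 when the register is
   `preferred or nothing' (pref_or_nothing[i] set).  */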
3710 /* Now see if any output operands that are marked "earlyclobber"
3711 in this alternative conflict with any input operands
3712 or any memory addresses. */
3714 for (i = 0; i < noperands; i++)
3715 if (this_alternative_earlyclobber[i]
3716 && (this_alternative_win[i] || this_alternative_match_win[i]))
3718 struct decomposition early_data;
3720 early_data = decompose (recog_data.operand[i]);
3722 gcc_assert (modified[i] != RELOAD_READ);
3724 if (this_alternative[i] == NO_REGS)
3726 this_alternative_earlyclobber[i] = 0;
3727 gcc_assert (this_insn_is_asm);
3728 error_for_asm (this_insn,
3729 "%<&%> constraint used with no register class");
3732 for (j = 0; j < noperands; j++)
3733 /* Is this an input operand or a memory ref? */
3734 if ((MEM_P (recog_data.operand[j])
3735 || modified[j] != RELOAD_WRITE)
3736 && j != i
3737 /* Ignore things like match_operator operands. */
3738 && !recog_data.is_operator[j]
3739 /* Don't count an input operand that is constrained to match
3740 the early clobber operand. */
3741 && ! (this_alternative_matches[j] == i
3742 && rtx_equal_p (recog_data.operand[i],
3743 recog_data.operand[j]))
3744 /* Is it altered by storing the earlyclobber operand? */
3745 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3746 early_data))
3748 /* If the output is in a non-empty few-regs class,
3749 it's costly to reload it, so reload the input instead. */
3750 if (small_register_class_p (this_alternative[i])
3751 && (REG_P (recog_data.operand[j])
3752 || GET_CODE (recog_data.operand[j]) == SUBREG))
3754 losers++;
3755 this_alternative_win[j] = 0;
3756 this_alternative_match_win[j] = 0;
3758 else
3759 break;
3761 /* If an earlyclobber operand conflicts with something,
3762 it must be reloaded, so request this and count the cost. */
3763 if (j != noperands)
3765 losers++;
3766 this_alternative_win[i] = 0;
3767 this_alternative_match_win[j] = 0;
3768 for (j = 0; j < noperands; j++)
3769 if (this_alternative_matches[j] == i
3770 && this_alternative_match_win[j])
3772 this_alternative_win[j] = 0;
3773 this_alternative_match_win[j] = 0;
3774 losers++;
3779 /* If one alternative accepts all the operands, no reload required,
3780 choose that alternative; don't consider the remaining ones. */
3781 if (losers == 0)
3783 /* Unswap these so that they are never swapped at `finish'. */
3784 if (swapped)
3786 recog_data.operand[commutative] = substed_operand[commutative];
3787 recog_data.operand[commutative + 1]
3788 = substed_operand[commutative + 1];
3790 for (i = 0; i < noperands; i++)
3792 goal_alternative_win[i] = this_alternative_win[i];
3793 goal_alternative_match_win[i] = this_alternative_match_win[i];
3794 goal_alternative[i] = this_alternative[i];
3795 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3796 goal_alternative_matches[i] = this_alternative_matches[i];
3797 goal_alternative_earlyclobber[i]
3798 = this_alternative_earlyclobber[i];
3800 goal_alternative_number = this_alternative_number;
3801 goal_alternative_swapped = swapped;
3802 goal_earlyclobber = this_earlyclobber;
3803 goto finish;
3806 /* REJECT, set by the ! and ? constraint characters and when a register
3807 would be reloaded into a non-preferred class, discourages the use of
3808 this alternative for a reload goal. REJECT is incremented by six
3809 for each ? and two for each non-preferred class. */
3810 losers = losers * 6 + reject;
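/* Each `?' added 6 to REJECT and each reload contributes 6 through
   LOSERS, so after this scaling a `?' weighs roughly the same as one
   additional reload when alternatives are compared against BEST below.  */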
3812 /* If this alternative can be made to work by reloading,
3813 and it needs less reloading than the others checked so far,
3814 record it as the chosen goal for reloading. */
3815 if (! bad)
3817 if (best > losers)
3819 for (i = 0; i < noperands; i++)
3821 goal_alternative[i] = this_alternative[i];
3822 goal_alternative_win[i] = this_alternative_win[i];
3823 goal_alternative_match_win[i]
3824 = this_alternative_match_win[i];
3825 goal_alternative_offmemok[i]
3826 = this_alternative_offmemok[i];
3827 goal_alternative_matches[i] = this_alternative_matches[i];
3828 goal_alternative_earlyclobber[i]
3829 = this_alternative_earlyclobber[i];
3831 goal_alternative_swapped = swapped;
3832 best = losers;
3833 goal_alternative_number = this_alternative_number;
3834 goal_earlyclobber = this_earlyclobber;
3838 if (swapped)
3840 enum reg_class tclass;
3841 int t;
3843 /* If the commutative operands have been swapped, swap
3844 them back in order to check the next alternative. */
3845 recog_data.operand[commutative] = substed_operand[commutative];
3846 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3847 /* Unswap the duplicates too. */
3848 for (i = 0; i < recog_data.n_dups; i++)
3849 if (recog_data.dup_num[i] == commutative
3850 || recog_data.dup_num[i] == commutative + 1)
3851 *recog_data.dup_loc[i]
3852 = recog_data.operand[(int) recog_data.dup_num[i]];
3854 /* Unswap the operand related information as well. */
3855 tclass = preferred_class[commutative];
3856 preferred_class[commutative] = preferred_class[commutative + 1];
3857 preferred_class[commutative + 1] = tclass;
3859 t = pref_or_nothing[commutative];
3860 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3861 pref_or_nothing[commutative + 1] = t;
3863 t = address_reloaded[commutative];
3864 address_reloaded[commutative] = address_reloaded[commutative + 1];
3865 address_reloaded[commutative + 1] = t;
3870 /* The operands don't meet the constraints.
3871 goal_alternative describes the alternative
3872 that we could reach by reloading the fewest operands.
3873 Reload so as to fit it. */
3875 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3877 /* No alternative works with reloads?? */
3878 if (insn_code_number >= 0)
3879 fatal_insn ("unable to generate reloads for:", insn);
3880 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3881 /* Avoid further trouble with this insn. */
3882 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3883 n_reloads = 0;
3884 return 0;
3887 /* Jump to `finish' from above if all operands are valid already.
3888 In that case, goal_alternative_win is all 1. */
3889 finish:
3891 /* Right now, for any pair of operands I and J that are required to match,
3892 with I < J,
3893 goal_alternative_matches[J] is I.
3894 Set up goal_alternative_matched as the inverse function:
3895 goal_alternative_matched[I] = J. */
3897 for (i = 0; i < noperands; i++)
3898 goal_alternative_matched[i] = -1;
3900 for (i = 0; i < noperands; i++)
3901 if (! goal_alternative_win[i]
3902 && goal_alternative_matches[i] >= 0)
3903 goal_alternative_matched[goal_alternative_matches[i]] = i;
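/* For example, if operand 2 carried the constraint "0" in the chosen
   alternative and still needs a reload, goal_alternative_matches[2] is 0
   and the loop above sets goal_alternative_matched[0] = 2.  */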
3905 for (i = 0; i < noperands; i++)
3906 goal_alternative_win[i] |= goal_alternative_match_win[i];
3908 /* If the best alternative is with operands 1 and 2 swapped,
3909 consider them swapped before reporting the reloads. Update the
3910 operand numbers of any reloads already pushed. */
3912 if (goal_alternative_swapped)
3914 rtx tem;
3916 tem = substed_operand[commutative];
3917 substed_operand[commutative] = substed_operand[commutative + 1];
3918 substed_operand[commutative + 1] = tem;
3919 tem = recog_data.operand[commutative];
3920 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3921 recog_data.operand[commutative + 1] = tem;
3922 tem = *recog_data.operand_loc[commutative];
3923 *recog_data.operand_loc[commutative]
3924 = *recog_data.operand_loc[commutative + 1];
3925 *recog_data.operand_loc[commutative + 1] = tem;
3927 for (i = 0; i < n_reloads; i++)
3929 if (rld[i].opnum == commutative)
3930 rld[i].opnum = commutative + 1;
3931 else if (rld[i].opnum == commutative + 1)
3932 rld[i].opnum = commutative;
3936 for (i = 0; i < noperands; i++)
3938 operand_reloadnum[i] = -1;
3940 /* If this is an earlyclobber operand, we need to widen the scope.
3941 The reload must remain valid from the start of the insn being
3942 reloaded until after the operand is stored into its destination.
3943 We approximate this with RELOAD_OTHER even though we know that we
3944 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3946 One special case that is worth checking is when we have an
3947 output that is earlyclobber but isn't used past the insn (typically
3948 a SCRATCH). In this case, we only need have the reload live
3949 through the insn itself, but not for any of our input or output
3950 reloads.
3951 But we must not accidentally narrow the scope of an existing
3952 RELOAD_OTHER reload - leave these alone.
3954 In any case, anything needed to address this operand can remain
3955 however it was previously categorized. */
3957 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3958 operand_type[i]
3959 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3960 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3963 /* Any constants that aren't allowed and can't be reloaded
3964 into registers are here changed into memory references. */
3965 for (i = 0; i < noperands; i++)
3966 if (! goal_alternative_win[i])
3968 rtx op = recog_data.operand[i];
3969 rtx subreg = NULL_RTX;
3970 rtx plus = NULL_RTX;
3971 enum machine_mode mode = operand_mode[i];
3973 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3974 push_reload so we have to let them pass here. */
3975 if (GET_CODE (op) == SUBREG)
3977 subreg = op;
3978 op = SUBREG_REG (op);
3979 mode = GET_MODE (op);
3982 if (GET_CODE (op) == PLUS)
3984 plus = op;
3985 op = XEXP (op, 1);
3988 if (CONST_POOL_OK_P (mode, op)
3989 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3990 == NO_REGS)
3991 || no_input_reloads))
3993 int this_address_reloaded;
3994 rtx tem = force_const_mem (mode, op);
3996 /* If we stripped a SUBREG or a PLUS above add it back. */
3997 if (plus != NULL_RTX)
3998 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
4000 if (subreg != NULL_RTX)
4001 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
4003 this_address_reloaded = 0;
4004 substed_operand[i] = recog_data.operand[i]
4005 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
4006 0, insn, &this_address_reloaded);
4008 /* If the alternative accepts constant pool refs directly
4009 there will be no reload needed at all. */
4010 if (plus == NULL_RTX
4011 && subreg == NULL_RTX
4012 && alternative_allows_const_pool_ref (this_address_reloaded == 0
4013 ? substed_operand[i]
4014 : NULL,
4015 recog_data.constraints[i],
4016 goal_alternative_number))
4017 goal_alternative_win[i] = 1;
4021 /* Record the values of the earlyclobber operands for the caller. */
4022 if (goal_earlyclobber)
4023 for (i = 0; i < noperands; i++)
4024 if (goal_alternative_earlyclobber[i])
4025 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
4027 /* Now record reloads for all the operands that need them. */
4028 for (i = 0; i < noperands; i++)
4029 if (! goal_alternative_win[i])
4031 /* Operands that match previous ones have already been handled. */
4032 if (goal_alternative_matches[i] >= 0)
4034 /* Handle an operand with a nonoffsettable address
4035 appearing where an offsettable address will do
4036 by reloading the address into a base register.
4038 ??? We can also do this when the operand is a register and
4039 reg_equiv_mem is not offsettable, but this is a bit tricky,
4040 so we don't bother with it. It may not be worth doing. */
4041 else if (goal_alternative_matched[i] == -1
4042 && goal_alternative_offmemok[i]
4043 && MEM_P (recog_data.operand[i]))
4045 /* If the address to be reloaded is a VOIDmode constant,
4046 use the default address mode as mode of the reload register,
4047 as would have been done by find_reloads_address. */
4048 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4049 enum machine_mode address_mode;
4051 address_mode = get_address_mode (recog_data.operand[i]);
4052 operand_reloadnum[i]
4053 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4054 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4055 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4056 address_mode,
4057 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4058 rld[operand_reloadnum[i]].inc
4059 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4061 /* If this operand is an output, we will have made any
4062 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4063 now we are treating part of the operand as an input, so
4064 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4066 if (modified[i] == RELOAD_WRITE)
4068 for (j = 0; j < n_reloads; j++)
4070 if (rld[j].opnum == i)
4072 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4073 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4074 else if (rld[j].when_needed
4075 == RELOAD_FOR_OUTADDR_ADDRESS)
4076 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4081 else if (goal_alternative_matched[i] == -1)
4083 operand_reloadnum[i]
4084 = push_reload ((modified[i] != RELOAD_WRITE
4085 ? recog_data.operand[i] : 0),
4086 (modified[i] != RELOAD_READ
4087 ? recog_data.operand[i] : 0),
4088 (modified[i] != RELOAD_WRITE
4089 ? recog_data.operand_loc[i] : 0),
4090 (modified[i] != RELOAD_READ
4091 ? recog_data.operand_loc[i] : 0),
4092 (enum reg_class) goal_alternative[i],
4093 (modified[i] == RELOAD_WRITE
4094 ? VOIDmode : operand_mode[i]),
4095 (modified[i] == RELOAD_READ
4096 ? VOIDmode : operand_mode[i]),
4097 (insn_code_number < 0 ? 0
4098 : insn_data[insn_code_number].operand[i].strict_low),
4099 0, i, operand_type[i]);
4101 /* In a matching pair of operands, one must be input only
4102 and the other must be output only.
4103 Pass the input operand as IN and the other as OUT. */
4104 else if (modified[i] == RELOAD_READ
4105 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4107 operand_reloadnum[i]
4108 = push_reload (recog_data.operand[i],
4109 recog_data.operand[goal_alternative_matched[i]],
4110 recog_data.operand_loc[i],
4111 recog_data.operand_loc[goal_alternative_matched[i]],
4112 (enum reg_class) goal_alternative[i],
4113 operand_mode[i],
4114 operand_mode[goal_alternative_matched[i]],
4115 0, 0, i, RELOAD_OTHER);
4116 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4118 else if (modified[i] == RELOAD_WRITE
4119 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4121 operand_reloadnum[goal_alternative_matched[i]]
4122 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4123 recog_data.operand[i],
4124 recog_data.operand_loc[goal_alternative_matched[i]],
4125 recog_data.operand_loc[i],
4126 (enum reg_class) goal_alternative[i],
4127 operand_mode[goal_alternative_matched[i]],
4128 operand_mode[i],
4129 0, 0, i, RELOAD_OTHER);
4130 operand_reloadnum[i] = output_reloadnum;
4132 else
4134 gcc_assert (insn_code_number < 0);
4135 error_for_asm (insn, "inconsistent operand constraints "
4136 "in an %<asm%>");
4137 /* Avoid further trouble with this insn. */
4138 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4139 n_reloads = 0;
4140 return 0;
4143 else if (goal_alternative_matched[i] < 0
4144 && goal_alternative_matches[i] < 0
4145 && address_operand_reloaded[i] != 1
4146 && optimize)
4148 /* For each non-matching operand that's a MEM or a pseudo-register
4149 that didn't get a hard register, make an optional reload.
4150 This may get done even if the insn needs no reloads otherwise. */
4152 rtx operand = recog_data.operand[i];
4154 while (GET_CODE (operand) == SUBREG)
4155 operand = SUBREG_REG (operand);
4156 if ((MEM_P (operand)
4157 || (REG_P (operand)
4158 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4159 /* If this is only for an output, the optional reload would not
4160 actually cause us to use a register now, just note that
4161 something is stored here. */
4162 && (goal_alternative[i] != NO_REGS
4163 || modified[i] == RELOAD_WRITE)
4164 && ! no_input_reloads
4165 /* An optional output reload might allow INSN to be deleted later.
4166 We must not make in-out reloads on insns that are not permitted
4167 to have output reloads.
4168 If this is an asm, we can't delete it; we must not even call
4169 push_reload for an optional output reload in this case,
4170 because we can't be sure that the constraint allows a register,
4171 and push_reload verifies the constraints for asms. */
4172 && (modified[i] == RELOAD_READ
4173 || (! no_output_reloads && ! this_insn_is_asm)))
4174 operand_reloadnum[i]
4175 = push_reload ((modified[i] != RELOAD_WRITE
4176 ? recog_data.operand[i] : 0),
4177 (modified[i] != RELOAD_READ
4178 ? recog_data.operand[i] : 0),
4179 (modified[i] != RELOAD_WRITE
4180 ? recog_data.operand_loc[i] : 0),
4181 (modified[i] != RELOAD_READ
4182 ? recog_data.operand_loc[i] : 0),
4183 (enum reg_class) goal_alternative[i],
4184 (modified[i] == RELOAD_WRITE
4185 ? VOIDmode : operand_mode[i]),
4186 (modified[i] == RELOAD_READ
4187 ? VOIDmode : operand_mode[i]),
4188 (insn_code_number < 0 ? 0
4189 : insn_data[insn_code_number].operand[i].strict_low),
4190 1, i, operand_type[i]);
4191 /* If a memory reference remains (either as a MEM or a pseudo that
4192 did not get a hard register), yet we can't make an optional
4193 reload, check if this is actually a pseudo register reference;
4194 we then need to emit a USE and/or a CLOBBER so that reload
4195 inheritance will do the right thing. */
4196 else if (replace
4197 && (MEM_P (operand)
4198 || (REG_P (operand)
4199 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4200 && reg_renumber [REGNO (operand)] < 0)))
4202 operand = *recog_data.operand_loc[i];
4204 while (GET_CODE (operand) == SUBREG)
4205 operand = SUBREG_REG (operand);
4206 if (REG_P (operand))
4208 if (modified[i] != RELOAD_WRITE)
4209 /* We mark the USE with QImode so that we recognize
4210 it as one that can be safely deleted at the end
4211 of reload. */
4212 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4213 insn), QImode);
4214 if (modified[i] != RELOAD_READ)
4215 emit_insn_after (gen_clobber (operand), insn);
4219 else if (goal_alternative_matches[i] >= 0
4220 && goal_alternative_win[goal_alternative_matches[i]]
4221 && modified[i] == RELOAD_READ
4222 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4223 && ! no_input_reloads && ! no_output_reloads
4224 && optimize)
4226 /* Similarly, make an optional reload for a pair of matching
4227 objects that are in MEM or a pseudo that didn't get a hard reg. */
4229 rtx operand = recog_data.operand[i];
4231 while (GET_CODE (operand) == SUBREG)
4232 operand = SUBREG_REG (operand);
4233 if ((MEM_P (operand)
4234 || (REG_P (operand)
4235 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4236 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4237 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4238 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4239 recog_data.operand[i],
4240 recog_data.operand_loc[goal_alternative_matches[i]],
4241 recog_data.operand_loc[i],
4242 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4243 operand_mode[goal_alternative_matches[i]],
4244 operand_mode[i],
4245 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4248 /* Perform whatever substitutions on the operands we are supposed
4249 to make due to commutativity or replacement of registers
4250 with equivalent constants or memory slots. */
4252 for (i = 0; i < noperands; i++)
4254 /* We only do this on the last pass through reload, because it is
4255 possible for some data (like reg_equiv_address) to be changed during
4256 later passes. Moreover, we lose the opportunity to get a useful
4257 reload_{in,out}_reg when we do these replacements. */
4259 if (replace)
4261 rtx substitution = substed_operand[i];
4263 *recog_data.operand_loc[i] = substitution;
4265 /* If we're replacing an operand with a LABEL_REF, we need to
4266 make sure that there's a REG_LABEL_OPERAND note attached to
4267 this instruction. */
4268 if (GET_CODE (substitution) == LABEL_REF
4269 && !find_reg_note (insn, REG_LABEL_OPERAND,
4270 XEXP (substitution, 0))
4271 /* For a JUMP_P, if it was a branch target it must have
4272 already been recorded as such. */
4273 && (!JUMP_P (insn)
4274 || !label_is_jump_target_p (XEXP (substitution, 0),
4275 insn)))
4277 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4278 if (LABEL_P (XEXP (substitution, 0)))
4279 ++LABEL_NUSES (XEXP (substitution, 0));
4283 else
4284 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4287 /* If this insn pattern contains any MATCH_DUP's, make sure that
4288 they will be substituted if the operands they match are substituted.
4289 Also do now any substitutions we already did on the operands.
4291 Don't do this if we aren't making replacements because we might be
4292 propagating things allocated by frame pointer elimination into places
4293 it doesn't expect. */
4295 if (insn_code_number >= 0 && replace)
4296 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4298 int opno = recog_data.dup_num[i];
4299 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4300 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4303 #if 0
4304 /* This loses because reloading of prior insns can invalidate the equivalence
4305 (or at least find_equiv_reg isn't smart enough to find it any more),
4306 causing this insn to need more reload regs than it needed before.
4307 It may be too late to make the reload regs available.
4308 Now this optimization is done safely in choose_reload_regs. */
4310 /* For each reload of a reg into some other class of reg,
4311 search for an existing equivalent reg (same value now) in the right class.
4312 We can use it as long as we don't need to change its contents. */
4313 for (i = 0; i < n_reloads; i++)
4314 if (rld[i].reg_rtx == 0
4315 && rld[i].in != 0
4316 && REG_P (rld[i].in)
4317 && rld[i].out == 0)
4319 rld[i].reg_rtx
4320 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4321 static_reload_reg_p, 0, rld[i].inmode);
4322 /* Prevent generation of insn to load the value
4323 because the one we found already has the value. */
4324 if (rld[i].reg_rtx)
4325 rld[i].in = rld[i].reg_rtx;
4327 #endif
4329 /* If we detected an error and replaced the asm instruction with a USE, forget
4330 about the reloads. */
4331 if (GET_CODE (PATTERN (insn)) == USE
4332 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4333 n_reloads = 0;
4335 /* Perhaps an output reload can be combined with another
4336 to reduce needs by one. */
4337 if (!goal_earlyclobber)
4338 combine_reloads ();
4340 /* If we have a pair of reloads for parts of an address, they are reloading
4341 the same object, the operands themselves were not reloaded, and they
4342 are for two operands that are supposed to match, merge the reloads and
4343 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4345 for (i = 0; i < n_reloads; i++)
4347 int k;
4349 for (j = i + 1; j < n_reloads; j++)
4350 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4351 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4352 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4353 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4354 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4355 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4356 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4357 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4358 && rtx_equal_p (rld[i].in, rld[j].in)
4359 && (operand_reloadnum[rld[i].opnum] < 0
4360 || rld[operand_reloadnum[rld[i].opnum]].optional)
4361 && (operand_reloadnum[rld[j].opnum] < 0
4362 || rld[operand_reloadnum[rld[j].opnum]].optional)
4363 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4364 || (goal_alternative_matches[rld[j].opnum]
4365 == rld[i].opnum)))
4367 for (k = 0; k < n_replacements; k++)
4368 if (replacements[k].what == j)
4369 replacements[k].what = i;
4371 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4372 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4373 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4374 else
4375 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4376 rld[j].in = 0;
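/* The loop above re-pointed any replacements recorded for reload J at
   reload I (much as transfer_replacements does elsewhere), so clearing
   rld[j].in leaves J empty and effectively merged into I.  */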
4380 /* Scan all the reloads and update their type.
4381 If a reload is for the address of an operand and we didn't reload
4382 that operand, change the type. Similarly, change the operand number
4383 of a reload when two operands match. If a reload is optional, treat it
4384 as though the operand isn't reloaded.
4386 ??? This latter case is somewhat odd because if we do the optional
4387 reload, it means the object is hanging around. Thus we need only
4388 do the address reload if the optional reload was NOT done.
4390 Change secondary reloads to be the address type of their operand, not
4391 the normal type.
4393 If an operand's reload is now RELOAD_OTHER, change any
4394 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4395 RELOAD_FOR_OTHER_ADDRESS. */
4397 for (i = 0; i < n_reloads; i++)
4399 if (rld[i].secondary_p
4400 && rld[i].when_needed == operand_type[rld[i].opnum])
4401 rld[i].when_needed = address_type[rld[i].opnum];
4403 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4404 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4405 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4406 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4407 && (operand_reloadnum[rld[i].opnum] < 0
4408 || rld[operand_reloadnum[rld[i].opnum]].optional))
4410 /* If we have a secondary reload to go along with this reload,
4411 change its type to RELOAD_FOR_OPADDR_ADDR. */
4413 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4414 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4415 && rld[i].secondary_in_reload != -1)
4417 int secondary_in_reload = rld[i].secondary_in_reload;
4419 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4421 /* If there's a tertiary reload we have to change it also. */
4422 if (secondary_in_reload > 0
4423 && rld[secondary_in_reload].secondary_in_reload != -1)
4424 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4425 = RELOAD_FOR_OPADDR_ADDR;
4428 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4429 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4430 && rld[i].secondary_out_reload != -1)
4432 int secondary_out_reload = rld[i].secondary_out_reload;
4434 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4436 /* If there's a tertiary reload we have to change it also. */
4437 if (secondary_out_reload
4438 && rld[secondary_out_reload].secondary_out_reload != -1)
4439 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4440 = RELOAD_FOR_OPADDR_ADDR;
4443 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4444 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4445 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4446 else
4447 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4450 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4451 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4452 && operand_reloadnum[rld[i].opnum] >= 0
4453 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4454 == RELOAD_OTHER))
4455 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4457 if (goal_alternative_matches[rld[i].opnum] >= 0)
4458 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4461 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4462 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4463 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4465 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4466 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4467 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4468 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4469 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4470 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4471 This is complicated by the fact that a single operand can have more
4472 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4473 choose_reload_regs without affecting code quality, and cases that
4474 actually fail are extremely rare, so it turns out to be better to fix
4475 the problem here by not generating cases that choose_reload_regs will
4476 fail for. */
4477 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4478 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4479 a single operand.
4480 We can reduce the register pressure by exploiting that a
4481 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4482 does not conflict with any of them, if it is only used for the first of
4483 the RELOAD_FOR_X_ADDRESS reloads. */
4485 int first_op_addr_num = -2;
4486 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4487 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4488 int need_change = 0;
4489 /* We use first_op_addr_num and the contents of the above arrays
4490 first as flags: -2 means no instance encountered, -1 means exactly
4491 one instance encountered.
4492 If more than one instance has been encountered, we store the reload
4493 number of the first reload of the kind in question; reload numbers
4494 are known to be non-negative. */
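/* So, scanning the reloads below, the first RELOAD_FOR_OPERAND_ADDRESS
   seen takes first_op_addr_num from -2 to -1; only a second one makes
   the pre-incremented value nonnegative, at which point the reload
   number is stored and need_change is set.  The per-operand arrays for
   the input- and output-address types work the same way.  */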
4495 for (i = 0; i < noperands; i++)
4496 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4497 for (i = n_reloads - 1; i >= 0; i--)
4499 switch (rld[i].when_needed)
4501 case RELOAD_FOR_OPERAND_ADDRESS:
4502 if (++first_op_addr_num >= 0)
4504 first_op_addr_num = i;
4505 need_change = 1;
4507 break;
4508 case RELOAD_FOR_INPUT_ADDRESS:
4509 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4511 first_inpaddr_num[rld[i].opnum] = i;
4512 need_change = 1;
4514 break;
4515 case RELOAD_FOR_OUTPUT_ADDRESS:
4516 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4518 first_outpaddr_num[rld[i].opnum] = i;
4519 need_change = 1;
4521 break;
4522 default:
4523 break;
4527 if (need_change)
4529 for (i = 0; i < n_reloads; i++)
4531 int first_num;
4532 enum reload_type type;
4534 switch (rld[i].when_needed)
4536 case RELOAD_FOR_OPADDR_ADDR:
4537 first_num = first_op_addr_num;
4538 type = RELOAD_FOR_OPERAND_ADDRESS;
4539 break;
4540 case RELOAD_FOR_INPADDR_ADDRESS:
4541 first_num = first_inpaddr_num[rld[i].opnum];
4542 type = RELOAD_FOR_INPUT_ADDRESS;
4543 break;
4544 case RELOAD_FOR_OUTADDR_ADDRESS:
4545 first_num = first_outpaddr_num[rld[i].opnum];
4546 type = RELOAD_FOR_OUTPUT_ADDRESS;
4547 break;
4548 default:
4549 continue;
4551 if (first_num < 0)
4552 continue;
4553 else if (i > first_num)
4554 rld[i].when_needed = type;
4555 else
4557 /* Check if the only TYPE reload that uses reload I is
4558 reload FIRST_NUM. */
4559 for (j = n_reloads - 1; j > first_num; j--)
4561 if (rld[j].when_needed == type
4562 && (rld[i].secondary_p
4563 ? rld[j].secondary_in_reload == i
4564 : reg_mentioned_p (rld[i].in, rld[j].in)))
4566 rld[i].when_needed = type;
4567 break;
4575 /* See if we have any reloads that are now allowed to be merged
4576 because we've changed when the reload is needed to
4577 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4578 check for the most common cases. */
4580 for (i = 0; i < n_reloads; i++)
4581 if (rld[i].in != 0 && rld[i].out == 0
4582 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4583 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4584 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4585 for (j = 0; j < n_reloads; j++)
4586 if (i != j && rld[j].in != 0 && rld[j].out == 0
4587 && rld[j].when_needed == rld[i].when_needed
4588 && MATCHES (rld[i].in, rld[j].in)
4589 && rld[i].rclass == rld[j].rclass
4590 && !rld[i].nocombine && !rld[j].nocombine
4591 && rld[i].reg_rtx == rld[j].reg_rtx)
4593 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4594 transfer_replacements (i, j);
4595 rld[j].in = 0;
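/* transfer_replacements hands any replacements recorded against reload J
   over to reload I before J is emptied by clearing its IN field.  */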
4598 #ifdef HAVE_cc0
4599 /* If we made any reloads for addresses, see if they violate a
4600 "no input reloads" requirement for this insn. But loads that we
4601 do after the insn (such as for output addresses) are fine. */
4602 if (no_input_reloads)
4603 for (i = 0; i < n_reloads; i++)
4604 gcc_assert (rld[i].in == 0
4605 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4606 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4607 #endif
4609 /* Compute reload_mode and reload_nregs. */
4610 for (i = 0; i < n_reloads; i++)
4612 rld[i].mode
4613 = (rld[i].inmode == VOIDmode
4614 || (GET_MODE_SIZE (rld[i].outmode)
4615 > GET_MODE_SIZE (rld[i].inmode)))
4616 ? rld[i].outmode : rld[i].inmode;
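/* In other words, rld[i].mode becomes the wider of the input and output
   modes: the output mode is used when the input mode is VOIDmode or
   strictly narrower, otherwise the input mode is kept.  */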
4618 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4621 /* Special case a simple move with an input reload and a
4622 destination of a hard reg, if the hard reg is ok, use it. */
4623 for (i = 0; i < n_reloads; i++)
4624 if (rld[i].when_needed == RELOAD_FOR_INPUT
4625 && GET_CODE (PATTERN (insn)) == SET
4626 && REG_P (SET_DEST (PATTERN (insn)))
4627 && (SET_SRC (PATTERN (insn)) == rld[i].in
4628 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4629 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4631 rtx dest = SET_DEST (PATTERN (insn));
4632 unsigned int regno = REGNO (dest);
4634 if (regno < FIRST_PSEUDO_REGISTER
4635 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4636 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4638 int nr = hard_regno_nregs[regno][rld[i].mode];
4639 int ok = 1, nri;
4641 for (nri = 1; nri < nr; nri ++)
4642 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4643 ok = 0;
4645 if (ok)
4646 rld[i].reg_rtx = dest;
4650 return retval;
4653 /* Return true if alternative number ALTNUM in constraint-string
4654 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4655 MEM gives the reference if it didn't need any reloads, otherwise it
4656 is null. */
4658 static bool
4659 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4660 const char *constraint, int altnum)
4662 int c;
4664 /* Skip alternatives before the one requested. */
4665 while (altnum > 0)
4667 while (*constraint++ != ',')
4668 ;
4669 altnum--;
4671 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4672 If one of them is present, this alternative accepts the result of
4673 passing a constant-pool reference through find_reloads_toplev.
4675 The same is true of extra memory constraints if the address
4676 was reloaded into a register. However, the target may elect
4677 to disallow the original constant address, forcing it to be
4678 reloaded into a register instead. */
4679 for (; (c = *constraint) && c != ',' && c != '#';
4680 constraint += CONSTRAINT_LEN (c, constraint))
4682 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4683 return true;
4684 #ifdef EXTRA_CONSTRAINT_STR
4685 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4686 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4687 return true;
4688 #endif
4690 return false;
4693 /* Scan X for memory references and scan the addresses for reloading.
4694 Also checks for references to "constant" regs that we want to eliminate
4695 and replaces them with the values they stand for.
4696 We may alter X destructively if it contains a reference to such.
4697 If X is just a constant reg, we return the equivalent value
4698 instead of X.
4700 IND_LEVELS says how many levels of indirect addressing this machine
4701 supports.
4703 OPNUM and TYPE identify the purpose of the reload.
4705 IS_SET_DEST is true if X is the destination of a SET, which is not
4706 appropriate to be replaced by a constant.
4708 INSN, if nonzero, is the insn in which we do the reload. It is used
4709 to determine if we may generate output reloads, and where to put USEs
4710 for pseudos that we have to replace with stack slots.
4712 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4713 result of find_reloads_address. */
4715 static rtx
4716 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4717 int ind_levels, int is_set_dest, rtx insn,
4718 int *address_reloaded)
4720 RTX_CODE code = GET_CODE (x);
4722 const char *fmt = GET_RTX_FORMAT (code);
4723 int i;
4724 int copied;
4726 if (code == REG)
4728 /* This code is duplicated for speed in find_reloads. */
4729 int regno = REGNO (x);
4730 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4731 x = reg_equiv_constant (regno);
4732 #if 0
4733 /* This creates (subreg (mem...)) which would cause an unnecessary
4734 reload of the mem. */
4735 else if (reg_equiv_mem (regno) != 0)
4736 x = reg_equiv_mem (regno);
4737 #endif
4738 else if (reg_equiv_memory_loc (regno)
4739 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4741 rtx mem = make_memloc (x, regno);
4742 if (reg_equiv_address (regno)
4743 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4745 /* If this is not a toplevel operand, find_reloads doesn't see
4746 this substitution. We have to emit a USE of the pseudo so
4747 that delete_output_reload can see it. */
4748 if (replace_reloads && recog_data.operand[opnum] != x)
4749 /* We mark the USE with QImode so that we recognize it
4750 as one that can be safely deleted at the end of
4751 reload. */
4752 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4753 QImode);
4754 x = mem;
4755 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4756 opnum, type, ind_levels, insn);
4757 if (!rtx_equal_p (x, mem))
4758 push_reg_equiv_alt_mem (regno, x);
4759 if (address_reloaded)
4760 *address_reloaded = i;
4763 return x;
4765 if (code == MEM)
4767 rtx tem = x;
4769 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4770 opnum, type, ind_levels, insn);
4771 if (address_reloaded)
4772 *address_reloaded = i;
4774 return tem;
4777 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4779 /* Check for SUBREG containing a REG that's equivalent to a
4780 constant. If the constant has a known value, truncate it
4781 right now. Similarly if we are extracting a single-word of a
4782 multi-word constant. If the constant is symbolic, allow it
4783 to be substituted normally. push_reload will strip the
4784 subreg later. The constant must not be VOIDmode, because we
4785 will lose the mode of the register (this should never happen
4786 because one of the cases above should handle it). */
4788 int regno = REGNO (SUBREG_REG (x));
4789 rtx tem;
4791 if (regno >= FIRST_PSEUDO_REGISTER
4792 && reg_renumber[regno] < 0
4793 && reg_equiv_constant (regno) != 0)
4795 tem =
4796 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4797 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4798 gcc_assert (tem);
4799 if (CONSTANT_P (tem)
4800 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4802 tem = force_const_mem (GET_MODE (x), tem);
4803 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4804 &XEXP (tem, 0), opnum, type,
4805 ind_levels, insn);
4806 if (address_reloaded)
4807 *address_reloaded = i;
4809 return tem;
4812 /* If the subreg contains a reg that will be converted to a mem,
4813 convert the subreg to a narrower memref now.
4814 Otherwise, we would get (subreg (mem ...) ...),
4815 which would force reload of the mem.
4817 We also need to do this if there is an equivalent MEM that is
4818 not offsettable. In that case, alter_subreg would produce an
4819 invalid address on big-endian machines.
4821 For machines that extend byte loads, we must not reload using
4822 a wider mode if we have a paradoxical SUBREG. find_reloads will
4823 force a reload in that case. So we should not do anything here. */
4825 if (regno >= FIRST_PSEUDO_REGISTER
4826 #ifdef LOAD_EXTEND_OP
4827 && !paradoxical_subreg_p (x)
4828 #endif
4829 && (reg_equiv_address (regno) != 0
4830 || (reg_equiv_mem (regno) != 0
4831 && (! strict_memory_address_addr_space_p
4832 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4833 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4834 || ! offsettable_memref_p (reg_equiv_mem (regno))
4835 || num_not_at_initial_offset))))
4836 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4837 insn, address_reloaded);
4840 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4842 if (fmt[i] == 'e')
4844 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4845 ind_levels, is_set_dest, insn,
4846 address_reloaded);
4847 /* If we have replaced a reg with its equivalent memory loc -
4848 that can still be handled here e.g. if it's in a paradoxical
4849 subreg - we must make the change in a copy, rather than using
4850 a destructive change. This way, find_reloads can still elect
4851 not to do the change. */
4852 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4854 x = shallow_copy_rtx (x);
4855 copied = 1;
4857 XEXP (x, i) = new_part;
4860 return x;
4863 /* Return a mem ref for the memory equivalent of reg REGNO.
4864 This mem ref is not shared with anything. */
4866 static rtx
4867 make_memloc (rtx ad, int regno)
4869 /* We must rerun eliminate_regs, in case the elimination
4870 offsets have changed. */
4871 rtx tem
4872 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4873 0);
4875 /* If TEM might contain a pseudo, we must copy it to avoid
4876 modifying it when we do the substitution for the reload. */
4877 if (rtx_varies_p (tem, 0))
4878 tem = copy_rtx (tem);
4880 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4881 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4883 /* Copy the result if it's still the same as the equivalence, to avoid
4884 modifying it when we do the substitution for the reload. */
4885 if (tem == reg_equiv_memory_loc (regno))
4886 tem = copy_rtx (tem);
4887 return tem;
4890 /* Returns true if AD could be turned into a valid memory reference
4891 to mode MODE in address space AS by reloading the part pointed to
4892 by PART into a register. */
4894 static int
4895 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4896 addr_space_t as, rtx *part)
4898 int retv;
4899 rtx tem = *part;
4900 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
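/* Presumably max_reg_num () gives a register number one past the highest
   currently in use, so the non-strict validity check below treats the
   temporarily substituted REG as a pseudo that a later reload could put
   into whatever register class the address requires.  */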
4902 *part = reg;
4903 retv = memory_address_addr_space_p (mode, ad, as);
4904 *part = tem;
4906 return retv;
4909 /* Record all reloads needed for handling memory address AD
4910 which appears in *LOC in a memory reference to mode MODE
4911 which itself is found in location *MEMREFLOC.
4912 Note that we take shortcuts assuming that no multi-reg machine mode
4913 occurs as part of an address.
4915 OPNUM and TYPE specify the purpose of this reload.
4917 IND_LEVELS says how many levels of indirect addressing this machine
4918 supports.
4920 INSN, if nonzero, is the insn in which we do the reload. It is used
4921 to determine if we may generate output reloads, and where to put USEs
4922 for pseudos that we have to replace with stack slots.
4924 Value is one if this address is reloaded or replaced as a whole; it is
4925 zero if the top level of this address was not reloaded or replaced, and
4926 it is -1 if it may or may not have been reloaded or replaced.
4928 Note that there is no verification that the address will be valid after
4929 this routine does its work. Instead, we rely on the fact that the address
4930 was valid when reload started. So we need only undo things that reload
4931 could have broken. These are wrong register types, pseudos not allocated
4932 to a hard register, and frame pointer elimination. */
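/* Illustrative sketch, added for exposition; the operand index and reload
   type are hypothetical.  A caller with a MEM operand might invoke this
   routine roughly as

     rtx mem = recog_data.operand[i];
     int r = find_reloads_address (GET_MODE (mem), &mem, XEXP (mem, 0),
                                   &XEXP (mem, 0), i, RELOAD_FOR_INPUT,
                                   ind_levels, insn);

   where r == 1 means the address was reloaded or replaced as a whole,
   r == 0 means the top level was left alone, and r == -1 means it may or
   may not have been.  */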
4934 static int
4935 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4936 rtx *loc, int opnum, enum reload_type type,
4937 int ind_levels, rtx insn)
4939 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4940 : ADDR_SPACE_GENERIC;
4941 int regno;
4942 int removed_and = 0;
4943 int op_index;
4944 rtx tem;
4946 /* If the address is a register, see if it is a legitimate address and
4947 reload if not. We first handle the cases where we need not reload
4948 or where we must reload in a non-standard way. */
4950 if (REG_P (ad))
4952 regno = REGNO (ad);
4954 if (reg_equiv_constant (regno) != 0)
4956 find_reloads_address_part (reg_equiv_constant (regno), loc,
4957 base_reg_class (mode, as, MEM, SCRATCH),
4958 GET_MODE (ad), opnum, type, ind_levels);
4959 return 1;
4962 tem = reg_equiv_memory_loc (regno);
4963 if (tem != 0)
4965 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4967 tem = make_memloc (ad, regno);
4968 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4969 XEXP (tem, 0),
4970 MEM_ADDR_SPACE (tem)))
4972 rtx orig = tem;
4974 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4975 &XEXP (tem, 0), opnum,
4976 ADDR_TYPE (type), ind_levels, insn);
4977 if (!rtx_equal_p (tem, orig))
4978 push_reg_equiv_alt_mem (regno, tem);
4980 /* We can avoid a reload if the register's equivalent memory
4981 expression is valid as an indirect memory address.
4982 But not all addresses are valid in a mem used as an indirect
4983 address: only reg or reg+constant. */
4985 if (ind_levels > 0
4986 && strict_memory_address_addr_space_p (mode, tem, as)
4987 && (REG_P (XEXP (tem, 0))
4988 || (GET_CODE (XEXP (tem, 0)) == PLUS
4989 && REG_P (XEXP (XEXP (tem, 0), 0))
4990 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4992 /* TEM is not the same as what we'll be replacing the
4993 pseudo with after reload, put a USE in front of INSN
4994 in the final reload pass. */
4995 if (replace_reloads
4996 && num_not_at_initial_offset
4997 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4999 *loc = tem;
5000 /* We mark the USE with QImode so that we
5001 recognize it as one that can be safely
5002 deleted at the end of reload. */
5003 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
5004 insn), QImode);
5006 /* This doesn't really count as replacing the address
5007 as a whole, since it is still a memory access. */
5009 return 0;
5011 ad = tem;
5015 /* The only remaining case where we can avoid a reload is if this is a
5016 hard register that is valid as a base register and which is not the
5017 subject of a CLOBBER in this insn. */
5019 else if (regno < FIRST_PSEUDO_REGISTER
5020 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
5021 && ! regno_clobbered_p (regno, this_insn, mode, 0))
5022 return 0;
5024 /* If we do not have one of the cases above, we must do the reload. */
5025 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
5026 base_reg_class (mode, as, MEM, SCRATCH),
5027 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5028 return 1;
5031 if (strict_memory_address_addr_space_p (mode, ad, as))
5033 /* The address appears valid, so reloads are not needed.
5034 But the address may contain an eliminable register.
5035 This can happen because a machine with indirect addressing
5036 may consider a pseudo register by itself a valid address even when
5037 it has failed to get a hard reg.
5038 So do a tree-walk to find and eliminate all such regs. */
5040 /* But first quickly dispose of a common case. */
5041 if (GET_CODE (ad) == PLUS
5042 && CONST_INT_P (XEXP (ad, 1))
5043 && REG_P (XEXP (ad, 0))
5044 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5045 return 0;
5047 subst_reg_equivs_changed = 0;
5048 *loc = subst_reg_equivs (ad, insn);
5050 if (! subst_reg_equivs_changed)
5051 return 0;
5053 /* Check result for validity after substitution. */
5054 if (strict_memory_address_addr_space_p (mode, ad, as))
5055 return 0;
5058 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5061 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5063 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5064 ind_levels, win);
5066 break;
5067 win:
5068 *memrefloc = copy_rtx (*memrefloc);
5069 XEXP (*memrefloc, 0) = ad;
5070 move_replacements (&ad, &XEXP (*memrefloc, 0));
5071 return -1;
5073 while (0);
5074 #endif
5076 /* The address is not valid. We have to figure out why. First see if
5077 we have an outer AND and remove it if so. Then analyze what's inside. */
5079 if (GET_CODE (ad) == AND)
5081 removed_and = 1;
5082 loc = &XEXP (ad, 0);
5083 ad = *loc;
5086 /* One possibility for why the address is invalid is that it is itself
5087 a MEM. This can happen when the frame pointer is being eliminated, a
5088 pseudo is not allocated to a hard register, and the offset between the
5089 frame and stack pointers is not its initial value. In that case the
5090 pseudo will have been replaced by a MEM referring to the
5091 stack pointer. */
5092 if (MEM_P (ad))
5094 /* First ensure that the address in this MEM is valid. Then, unless
5095 indirect addresses are valid, reload the MEM into a register. */
5096 tem = ad;
5097 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5098 opnum, ADDR_TYPE (type),
5099 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5101 /* If tem was changed, then we must create a new memory reference to
5102 hold it and store it back into memrefloc. */
5103 if (tem != ad && memrefloc)
5105 *memrefloc = copy_rtx (*memrefloc);
5106 copy_replacements (tem, XEXP (*memrefloc, 0));
5107 loc = &XEXP (*memrefloc, 0);
5108 if (removed_and)
5109 loc = &XEXP (*loc, 0);
5112 /* Check the same cases as for indirect addresses above, except
5113 that we can allow pseudos and a MEM since they should have been
5114 taken care of above. */
5116 if (ind_levels == 0
5117 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5118 || MEM_P (XEXP (tem, 0))
5119 || ! (REG_P (XEXP (tem, 0))
5120 || (GET_CODE (XEXP (tem, 0)) == PLUS
5121 && REG_P (XEXP (XEXP (tem, 0), 0))
5122 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5124 /* Must use TEM here, not AD, since it is the one that will
5125 have any subexpressions reloaded, if needed. */
5126 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5127 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5128 VOIDmode, 0,
5129 0, opnum, type);
5130 return ! removed_and;
5132 else
5133 return 0;
5136 /* If we have the address of a stack slot but it's not valid because the
5137 displacement is too large, compute the sum in a register.
5138 Handle all base registers here, not just fp/ap/sp, because on some
5139 targets (namely SH) we can also get too large displacements from
5140 big-endian corrections. */
5141 else if (GET_CODE (ad) == PLUS
5142 && REG_P (XEXP (ad, 0))
5143 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5144 && CONST_INT_P (XEXP (ad, 1))
5145 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5146 CONST_INT)
5147 /* Similarly, if we were to reload the base register and the
5148 mem+offset address is still invalid, then we want to reload
5149 the whole address, not just the base register. */
5150 || ! maybe_memory_address_addr_space_p
5151 (mode, ad, as, &(XEXP (ad, 0)))))
5154 /* Unshare the MEM rtx so we can safely alter it. */
5155 if (memrefloc)
5157 *memrefloc = copy_rtx (*memrefloc);
5158 loc = &XEXP (*memrefloc, 0);
5159 if (removed_and)
5160 loc = &XEXP (*loc, 0);
5163 if (double_reg_address_ok
5164 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5165 PLUS, CONST_INT))
5167 /* Unshare the sum as well. */
5168 *loc = ad = copy_rtx (ad);
5170 /* Reload the displacement into an index reg.
5171 We assume the frame pointer or arg pointer is a base reg. */
5172 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5173 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5174 type, ind_levels);
5175 return 0;
5177 else
5179 /* If the sum of two regs is not necessarily valid,
5180 reload the sum into a base reg.
5181 That will at least work. */
5182 find_reloads_address_part (ad, loc,
5183 base_reg_class (mode, as, MEM, SCRATCH),
5184 GET_MODE (ad), opnum, type, ind_levels);
5186 return ! removed_and;
5189 /* If we have an indexed stack slot, there are three possible reasons why
5190 it might be invalid: The index might need to be reloaded, the address
5191 might have been made by frame pointer elimination and hence have a
5192 constant out of range, or both reasons might apply.
5194 We can easily check for an index needing reload, but even if that is the
5195 case, we might also have an invalid constant. To avoid making the
5196 conservative assumption and requiring two reloads, we see if this address
5197 is valid when not interpreted strictly. If it is, the only problem is
5198 that the index needs a reload and find_reloads_address_1 will take care
5199 of it.
5201 Handle all base registers here, not just fp/ap/sp, because on some
5202 targets (namely SPARC) we can also get invalid addresses from preventive
5203 subreg big-endian corrections made by find_reloads_toplev. We
5204 can also get expressions involving LO_SUM (rather than PLUS) from
5205 find_reloads_subreg_address.
5207 If we decide to do something, it must be that `double_reg_address_ok'
5208 is true. We generate a reload of the base register + constant and
5209 rework the sum so that the reload register will be added to the index.
5210 This is safe because we know the address isn't shared.
5212 We check for the base register as both the first and second operand of
5213 the innermost PLUS and/or LO_SUM. */
5215 for (op_index = 0; op_index < 2; ++op_index)
5217 rtx operand, addend;
5218 enum rtx_code inner_code;
5220 if (GET_CODE (ad) != PLUS)
5221 continue;
5223 inner_code = GET_CODE (XEXP (ad, 0));
5224 if (!(GET_CODE (ad) == PLUS
5225 && CONST_INT_P (XEXP (ad, 1))
5226 && (inner_code == PLUS || inner_code == LO_SUM)))
5227 continue;
5229 operand = XEXP (XEXP (ad, 0), op_index);
5230 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5231 continue;
5233 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5235 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5236 GET_CODE (addend))
5237 || operand == frame_pointer_rtx
5238 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5239 || operand == hard_frame_pointer_rtx
5240 #endif
5241 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5242 || operand == arg_pointer_rtx
5243 #endif
5244 || operand == stack_pointer_rtx)
5245 && ! maybe_memory_address_addr_space_p
5246 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5248 rtx offset_reg;
5249 enum reg_class cls;
5251 offset_reg = plus_constant (GET_MODE (ad), operand,
5252 INTVAL (XEXP (ad, 1)));
5254 /* Form the adjusted address. */
5255 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5256 ad = gen_rtx_PLUS (GET_MODE (ad),
5257 op_index == 0 ? offset_reg : addend,
5258 op_index == 0 ? addend : offset_reg);
5259 else
5260 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5261 op_index == 0 ? offset_reg : addend,
5262 op_index == 0 ? addend : offset_reg);
5263 *loc = ad;
5265 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5266 find_reloads_address_part (XEXP (ad, op_index),
5267 &XEXP (ad, op_index), cls,
5268 GET_MODE (ad), opnum, type, ind_levels);
5269 find_reloads_address_1 (mode, as,
5270 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5271 GET_CODE (XEXP (ad, op_index)),
5272 &XEXP (ad, 1 - op_index), opnum,
5273 type, 0, insn);
5275 return 0;
5279 /* See if address becomes valid when an eliminable register
5280 in a sum is replaced. */
5282 tem = ad;
5283 if (GET_CODE (ad) == PLUS)
5284 tem = subst_indexed_address (ad);
5285 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5287 /* Ok, we win that way. Replace any additional eliminable
5288 registers. */
5290 subst_reg_equivs_changed = 0;
5291 tem = subst_reg_equivs (tem, insn);
5293 /* Make sure that didn't make the address invalid again. */
5295 if (! subst_reg_equivs_changed
5296 || strict_memory_address_addr_space_p (mode, tem, as))
5298 *loc = tem;
5299 return 0;
5303 /* If constants aren't valid addresses, reload the constant address
5304 into a register. */
5305 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5307 enum machine_mode address_mode = GET_MODE (ad);
5308 if (address_mode == VOIDmode)
5309 address_mode = targetm.addr_space.address_mode (as);
5311 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5312 Unshare it so we can safely alter it. */
5313 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5314 && CONSTANT_POOL_ADDRESS_P (ad))
5316 *memrefloc = copy_rtx (*memrefloc);
5317 loc = &XEXP (*memrefloc, 0);
5318 if (removed_and)
5319 loc = &XEXP (*loc, 0);
5322 find_reloads_address_part (ad, loc,
5323 base_reg_class (mode, as, MEM, SCRATCH),
5324 address_mode, opnum, type, ind_levels);
5325 return ! removed_and;
5328 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5329 opnum, type, ind_levels, insn);
5332 /* Find all pseudo regs appearing in AD
5333 that are eliminable in favor of equivalent values
5334 and do not have hard regs; replace them by their equivalents.
5335 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5336 front of it for pseudos that we have to replace with stack slots. */
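/* A hypothetical example (the pseudo number is invented): if pseudo 65 did
   not get a hard register and reg_equiv_constant (65) is (const_int 4),
   then passing (plus:SI (reg:SI 65) (reg:SI 1)) through subst_reg_equivs
   rewrites it in place to (plus:SI (const_int 4) (reg:SI 1)) and sets
   subst_reg_equivs_changed.  */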
5338 static rtx
5339 subst_reg_equivs (rtx ad, rtx insn)
5341 RTX_CODE code = GET_CODE (ad);
5342 int i;
5343 const char *fmt;
5345 switch (code)
5347 case HIGH:
5348 case CONST:
5349 CASE_CONST_ANY:
5350 case SYMBOL_REF:
5351 case LABEL_REF:
5352 case PC:
5353 case CC0:
5354 return ad;
5356 case REG:
5358 int regno = REGNO (ad);
5360 if (reg_equiv_constant (regno) != 0)
5362 subst_reg_equivs_changed = 1;
5363 return reg_equiv_constant (regno);
5365 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5367 rtx mem = make_memloc (ad, regno);
5368 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5370 subst_reg_equivs_changed = 1;
5371 /* We mark the USE with QImode so that we recognize it
5372 as one that can be safely deleted at the end of
5373 reload. */
5374 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5375 QImode);
5376 return mem;
5380 return ad;
5382 case PLUS:
5383 /* Quickly dispose of a common case. */
5384 if (XEXP (ad, 0) == frame_pointer_rtx
5385 && CONST_INT_P (XEXP (ad, 1)))
5386 return ad;
5387 break;
5389 default:
5390 break;
5393 fmt = GET_RTX_FORMAT (code);
5394 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5395 if (fmt[i] == 'e')
5396 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5397 return ad;
5400 /* Compute the sum of X and Y, making canonicalizations assumed in an
5401 address, namely: sum constant integers, surround the sum of two
5402 constants with a CONST, put the constant as the second operand, and
5403 group the constant on the outermost sum.
5405 This routine assumes both inputs are already in canonical form. */
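/* Illustrative examples of the canonicalization; the register number is
   invented:

     form_sum (Pmode, (plus (reg 1) (const_int 4)), (const_int 8))
       yields (plus (reg 1) (const_int 12)), and

     form_sum (Pmode, (symbol_ref "x"), (const_int 8))
       yields (const (plus (symbol_ref "x") (const_int 8))).  */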
5408 rtx form_sum (enum machine_mode mode, rtx x, rtx y)
5410 rtx tem;
5412 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5413 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5415 if (CONST_INT_P (x))
5416 return plus_constant (mode, y, INTVAL (x));
5417 else if (CONST_INT_P (y))
5418 return plus_constant (mode, x, INTVAL (y));
5419 else if (CONSTANT_P (x))
5420 tem = x, x = y, y = tem;
5422 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5423 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5425 /* Note that if the operands of Y are specified in the opposite
5426 order in the recursive calls below, infinite recursion will occur. */
5427 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5428 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5430 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5431 constant will have been placed second. */
5432 if (CONSTANT_P (x) && CONSTANT_P (y))
5434 if (GET_CODE (x) == CONST)
5435 x = XEXP (x, 0);
5436 if (GET_CODE (y) == CONST)
5437 y = XEXP (y, 0);
5439 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5442 return gen_rtx_PLUS (mode, x, y);
5445 /* If ADDR is a sum containing a pseudo register that should be
5446 replaced with a constant (from reg_equiv_constant),
5447 return the result of doing so, and also apply the associative
5448 law so that the result is more likely to be a valid address.
5449 (But it is not guaranteed to be one.)
5451 Note that at most one register is replaced, even if more are
5452 replaceable. Also, we try to put the result into a canonical form
5453 so it is more likely to be a valid address.
5455 In all other cases, return ADDR. */
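/* A hypothetical example (pseudo and register numbers invented): if
   pseudo 70 got no hard register and reg_equiv_constant (70) is
   (symbol_ref "tbl"), then

     subst_indexed_address ((plus (plus (reg 70) (reg 1)) (const_int 4)))

   substitutes the constant for the pseudo and reassociates, yielding
   (plus (reg 1) (const (plus (symbol_ref "tbl") (const_int 4)))),
   which is more likely to be a valid address.  */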
5457 static rtx
5458 subst_indexed_address (rtx addr)
5460 rtx op0 = 0, op1 = 0, op2 = 0;
5461 rtx tem;
5462 int regno;
5464 if (GET_CODE (addr) == PLUS)
5466 /* Try to find a register to replace. */
5467 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5468 if (REG_P (op0)
5469 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5470 && reg_renumber[regno] < 0
5471 && reg_equiv_constant (regno) != 0)
5472 op0 = reg_equiv_constant (regno);
5473 else if (REG_P (op1)
5474 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5475 && reg_renumber[regno] < 0
5476 && reg_equiv_constant (regno) != 0)
5477 op1 = reg_equiv_constant (regno);
5478 else if (GET_CODE (op0) == PLUS
5479 && (tem = subst_indexed_address (op0)) != op0)
5480 op0 = tem;
5481 else if (GET_CODE (op1) == PLUS
5482 && (tem = subst_indexed_address (op1)) != op1)
5483 op1 = tem;
5484 else
5485 return addr;
5487 /* Pick out up to three things to add. */
5488 if (GET_CODE (op1) == PLUS)
5489 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5490 else if (GET_CODE (op0) == PLUS)
5491 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5493 /* Compute the sum. */
5494 if (op2 != 0)
5495 op1 = form_sum (GET_MODE (addr), op1, op2);
5496 if (op1 != 0)
5497 op0 = form_sum (GET_MODE (addr), op0, op1);
5499 return op0;
5501 return addr;
5504 /* Update the REG_INC notes for an insn. It updates all REG_INC
5505 notes for the instruction which refer to REGNO so that they refer
5506 to the reload number.
5508 INSN is the insn for which any REG_INC notes need updating.
5510 REGNO is the register number which has been reloaded.
5512 RELOADNUM is the reload number. */
5514 static void
5515 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5516 int reloadnum ATTRIBUTE_UNUSED)
5518 #ifdef AUTO_INC_DEC
5519 rtx link;
5521 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5522 if (REG_NOTE_KIND (link) == REG_INC
5523 && (int) REGNO (XEXP (link, 0)) == regno)
5524 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5525 #endif
5528 /* Record the pseudo registers we must reload into hard registers in a
5529 subexpression of a would-be memory address, X referring to a value
5530 in mode MODE. (This function is not called if the address we find
5531 is strictly valid.)
5533 CONTEXT = 1 means we are considering regs as index regs,
5534 = 0 means we are considering them as base regs.
5535 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5536 or an autoinc code.
5537 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5538 is the code of the index part of the address. Otherwise, pass SCRATCH
5539 for this argument.
5540 OPNUM and TYPE specify the purpose of any reloads made.
5542 IND_LEVELS says how many levels of indirect addressing are
5543 supported at this point in the address.
5545 INSN, if nonzero, is the insn in which we do the reload. It is used
5546 to determine if we may generate output reloads.
5548 We return nonzero if X, as a whole, is reloaded or replaced. */
5550 /* Note that we take shortcuts assuming that no multi-reg machine mode
5551 occurs as part of an address.
5552 Also, this is not fully machine-customizable; it works for machines
5553 such as VAXen and 68000's and 32000's, but other possible machines
5554 could have addressing modes that this does not handle right.
5555 If you add push_reload calls here, you need to make sure gen_reload
5556 handles those cases gracefully. */
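/* Illustrative note, added for exposition: for a two-register address such
   as (plus (reg A) (reg B)) inside a MEM, find_reloads_address hands the
   whole address to this routine as

     find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
                             opnum, type, ind_levels, insn);

   and the PLUS case below decides which operand to treat as the base
   (CONTEXT == 0) and which as the index (CONTEXT == 1), pushing reloads
   only for whichever operand is not already acceptable.  */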
5558 static int
5559 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5560 rtx x, int context,
5561 enum rtx_code outer_code, enum rtx_code index_code,
5562 rtx *loc, int opnum, enum reload_type type,
5563 int ind_levels, rtx insn)
5565 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5566 ((CONTEXT) == 0 \
5567 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5568 : REGNO_OK_FOR_INDEX_P (REGNO))
5570 enum reg_class context_reg_class;
5571 RTX_CODE code = GET_CODE (x);
5573 if (context == 1)
5574 context_reg_class = INDEX_REG_CLASS;
5575 else
5576 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5578 switch (code)
5580 case PLUS:
5582 rtx orig_op0 = XEXP (x, 0);
5583 rtx orig_op1 = XEXP (x, 1);
5584 RTX_CODE code0 = GET_CODE (orig_op0);
5585 RTX_CODE code1 = GET_CODE (orig_op1);
5586 rtx op0 = orig_op0;
5587 rtx op1 = orig_op1;
5589 if (GET_CODE (op0) == SUBREG)
5591 op0 = SUBREG_REG (op0);
5592 code0 = GET_CODE (op0);
5593 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5594 op0 = gen_rtx_REG (word_mode,
5595 (REGNO (op0) +
5596 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5597 GET_MODE (SUBREG_REG (orig_op0)),
5598 SUBREG_BYTE (orig_op0),
5599 GET_MODE (orig_op0))));
5602 if (GET_CODE (op1) == SUBREG)
5604 op1 = SUBREG_REG (op1);
5605 code1 = GET_CODE (op1);
5606 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5607 /* ??? Why is this given op1's mode and above for
5608 ??? op0 SUBREGs we use word_mode? */
5609 op1 = gen_rtx_REG (GET_MODE (op1),
5610 (REGNO (op1) +
5611 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5612 GET_MODE (SUBREG_REG (orig_op1)),
5613 SUBREG_BYTE (orig_op1),
5614 GET_MODE (orig_op1))));
5616 /* A PLUS in the index register position may be created only as a result
5617 of register rematerialization for an expression like &localvar*4. Reload it.
5618 It may be possible to combine the displacement on the outer level,
5619 but it is probably not worthwhile to do so. */
5620 if (context == 1)
5622 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5623 opnum, ADDR_TYPE (type), ind_levels, insn);
5624 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5625 context_reg_class,
5626 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5627 return 1;
5630 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5631 || code0 == ZERO_EXTEND || code1 == MEM)
5633 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5634 &XEXP (x, 0), opnum, type, ind_levels,
5635 insn);
5636 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5637 &XEXP (x, 1), opnum, type, ind_levels,
5638 insn);
5641 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5642 || code1 == ZERO_EXTEND || code0 == MEM)
5644 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5645 &XEXP (x, 0), opnum, type, ind_levels,
5646 insn);
5647 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5648 &XEXP (x, 1), opnum, type, ind_levels,
5649 insn);
5652 else if (code0 == CONST_INT || code0 == CONST
5653 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5654 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5655 &XEXP (x, 1), opnum, type, ind_levels,
5656 insn);
5658 else if (code1 == CONST_INT || code1 == CONST
5659 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5660 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5661 &XEXP (x, 0), opnum, type, ind_levels,
5662 insn);
5664 else if (code0 == REG && code1 == REG)
5666 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5667 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5668 return 0;
5669 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5670 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5671 return 0;
5672 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5673 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5674 &XEXP (x, 1), opnum, type, ind_levels,
5675 insn);
5676 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5677 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5678 &XEXP (x, 0), opnum, type, ind_levels,
5679 insn);
5680 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5681 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5682 &XEXP (x, 0), opnum, type, ind_levels,
5683 insn);
5684 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5685 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5686 &XEXP (x, 1), opnum, type, ind_levels,
5687 insn);
5688 else
5690 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5691 &XEXP (x, 0), opnum, type, ind_levels,
5692 insn);
5693 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5694 &XEXP (x, 1), opnum, type, ind_levels,
5695 insn);
5699 else if (code0 == REG)
5701 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5702 &XEXP (x, 0), opnum, type, ind_levels,
5703 insn);
5704 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5705 &XEXP (x, 1), opnum, type, ind_levels,
5706 insn);
5709 else if (code1 == REG)
5711 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5712 &XEXP (x, 1), opnum, type, ind_levels,
5713 insn);
5714 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5715 &XEXP (x, 0), opnum, type, ind_levels,
5716 insn);
5720 return 0;
5722 case POST_MODIFY:
5723 case PRE_MODIFY:
5725 rtx op0 = XEXP (x, 0);
5726 rtx op1 = XEXP (x, 1);
5727 enum rtx_code index_code;
5728 int regno;
5729 int reloadnum;
5731 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5732 return 0;
5734 /* Currently, we only support {PRE,POST}_MODIFY constructs
5735 where a base register is {inc,dec}remented by the contents
5736 of another register or by a constant value. Thus, these
5737 operands must match. */
5738 gcc_assert (op0 == XEXP (op1, 0));
5740 /* Require index register (or constant). Let's just handle the
5741 register case in the meantime... If the target allows
5742 auto-modify by a constant then we could try replacing a pseudo
5743 register with its equivalent constant where applicable.
5745 We also handle the case where the register was eliminated
5746 resulting in a PLUS subexpression.
5748 If we later decide to reload the whole PRE_MODIFY or
5749 POST_MODIFY, inc_for_reload might clobber the reload register
5750 before reading the index. The index register might therefore
5751 need to live longer than a TYPE reload normally would, so be
5752 conservative and class it as RELOAD_OTHER. */
5753 if ((REG_P (XEXP (op1, 1))
5754 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5755 || GET_CODE (XEXP (op1, 1)) == PLUS)
5756 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5757 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5758 ind_levels, insn);
5760 gcc_assert (REG_P (XEXP (op1, 0)));
5762 regno = REGNO (XEXP (op1, 0));
5763 index_code = GET_CODE (XEXP (op1, 1));
5765 /* A register that is incremented cannot be constant! */
5766 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5767 || reg_equiv_constant (regno) == 0);
5769 /* Handle a register that is equivalent to a memory location
5770 which cannot be addressed directly. */
5771 if (reg_equiv_memory_loc (regno) != 0
5772 && (reg_equiv_address (regno) != 0
5773 || num_not_at_initial_offset))
5775 rtx tem = make_memloc (XEXP (x, 0), regno);
5777 if (reg_equiv_address (regno)
5778 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5780 rtx orig = tem;
5782 /* First reload the memory location's address.
5783 We can't use ADDR_TYPE (type) here, because we need to
5784 write back the value after reading it, hence we actually
5785 need two registers. */
5786 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5787 &XEXP (tem, 0), opnum,
5788 RELOAD_OTHER,
5789 ind_levels, insn);
5791 if (!rtx_equal_p (tem, orig))
5792 push_reg_equiv_alt_mem (regno, tem);
5794 /* Then reload the memory location into a base
5795 register. */
5796 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5797 &XEXP (op1, 0),
5798 base_reg_class (mode, as,
5799 code, index_code),
5800 GET_MODE (x), GET_MODE (x), 0,
5801 0, opnum, RELOAD_OTHER);
5803 update_auto_inc_notes (this_insn, regno, reloadnum);
5804 return 0;
5808 if (reg_renumber[regno] >= 0)
5809 regno = reg_renumber[regno];
5811 /* We require a base register here... */
5812 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5814 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5815 &XEXP (op1, 0), &XEXP (x, 0),
5816 base_reg_class (mode, as,
5817 code, index_code),
5818 GET_MODE (x), GET_MODE (x), 0, 0,
5819 opnum, RELOAD_OTHER);
5821 update_auto_inc_notes (this_insn, regno, reloadnum);
5822 return 0;
5825 return 0;
5827 case POST_INC:
5828 case POST_DEC:
5829 case PRE_INC:
5830 case PRE_DEC:
5831 if (REG_P (XEXP (x, 0)))
5833 int regno = REGNO (XEXP (x, 0));
5834 int value = 0;
5835 rtx x_orig = x;
5837 /* A register that is incremented cannot be constant! */
5838 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5839 || reg_equiv_constant (regno) == 0);
5841 /* Handle a register that is equivalent to a memory location
5842 which cannot be addressed directly. */
5843 if (reg_equiv_memory_loc (regno) != 0
5844 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5846 rtx tem = make_memloc (XEXP (x, 0), regno);
5847 if (reg_equiv_address (regno)
5848 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5850 rtx orig = tem;
5852 /* First reload the memory location's address.
5853 We can't use ADDR_TYPE (type) here, because we need to
5854 write back the value after reading it, hence we actually
5855 need two registers. */
5856 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5857 &XEXP (tem, 0), opnum, type,
5858 ind_levels, insn);
5859 if (!rtx_equal_p (tem, orig))
5860 push_reg_equiv_alt_mem (regno, tem);
5861 /* Put this inside a new increment-expression. */
5862 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5863 /* Proceed to reload that, as if it contained a register. */
5867 /* If we have a hard register that is ok in this incdec context,
5868 don't make a reload. If the register isn't nice enough for
5869 autoincdec, we can reload it. But if an autoincrement of a
5870 register that we have just verified as acceptable here is still
5871 not "valid" in the enclosing context, it must be that no autoincrement is "valid".
5872 If that is true and something made an autoincrement anyway,
5873 this must be a special context where one is allowed.
5874 (For example, a "push" instruction.)
5875 We can't improve this address, so leave it alone. */
5877 /* Otherwise, reload the autoincrement into a suitable hard reg
5878 and record how much to increment by. */
5880 if (reg_renumber[regno] >= 0)
5881 regno = reg_renumber[regno];
5882 if (regno >= FIRST_PSEUDO_REGISTER
5883 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5884 index_code))
5886 int reloadnum;
5888 /* If we can output the register afterwards, do so; this
5889 saves the extra update.
5890 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5891 CALL_INSN - and it does not set CC0.
5892 But don't do this if we cannot directly address the
5893 memory location, since this will make it harder to
5894 reuse address reloads, and increases register pressure.
5895 Also don't do this if we can probably update x directly. */
5896 rtx equiv = (MEM_P (XEXP (x, 0))
5897 ? XEXP (x, 0)
5898 : reg_equiv_mem (regno));
5899 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5900 if (insn && NONJUMP_INSN_P (insn) && equiv
5901 && memory_operand (equiv, GET_MODE (equiv))
5902 #ifdef HAVE_cc0
5903 && ! sets_cc0_p (PATTERN (insn))
5904 #endif
5905 && ! (icode != CODE_FOR_nothing
5906 && insn_operand_matches (icode, 0, equiv)
5907 && insn_operand_matches (icode, 1, equiv)))
5909 /* We use the original pseudo for loc, so that
5910 emit_reload_insns() knows which pseudo this
5911 reload refers to and updates the pseudo rtx, not
5912 its equivalent memory location, as well as the
5913 corresponding entry in reg_last_reload_reg. */
5914 loc = &XEXP (x_orig, 0);
5915 x = XEXP (x, 0);
5916 reloadnum
5917 = push_reload (x, x, loc, loc,
5918 context_reg_class,
5919 GET_MODE (x), GET_MODE (x), 0, 0,
5920 opnum, RELOAD_OTHER);
5922 else
5924 reloadnum
5925 = push_reload (x, x, loc, (rtx*) 0,
5926 context_reg_class,
5927 GET_MODE (x), GET_MODE (x), 0, 0,
5928 opnum, type);
5929 rld[reloadnum].inc
5930 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5932 value = 1;
5935 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5936 reloadnum);
5938 return value;
5940 return 0;
5942 case TRUNCATE:
5943 case SIGN_EXTEND:
5944 case ZERO_EXTEND:
5945 /* Look for parts to reload in the inner expression and reload them
5946 too, in addition to this operation. Reloading all inner parts in
5947 addition to this one shouldn't be necessary, but at this point,
5948 we don't know if we can possibly omit any part that *can* be
5949 reloaded. Targets that are better off reloading just either part
5950 (or perhaps even a different part of an outer expression), should
5951 define LEGITIMIZE_RELOAD_ADDRESS. */
5952 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5953 context, code, SCRATCH, &XEXP (x, 0), opnum,
5954 type, ind_levels, insn);
5955 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5956 context_reg_class,
5957 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5958 return 1;
5960 case MEM:
5961 /* This is probably the result of a substitution, by eliminate_regs, of
5962 an equivalent address for a pseudo that was not allocated to a hard
5963 register. Verify that the specified address is valid and reload it
5964 into a register.
5966 Since we know we are going to reload this item, don't decrement
5967 the indirection level.
5969 Note that this is actually conservative: it would be slightly more
5970 efficient to use the value of SPILL_INDIRECT_LEVELS from
5971 reload1.c here. */
5973 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5974 opnum, ADDR_TYPE (type), ind_levels, insn);
5975 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5976 context_reg_class,
5977 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5978 return 1;
5980 case REG:
5982 int regno = REGNO (x);
5984 if (reg_equiv_constant (regno) != 0)
5986 find_reloads_address_part (reg_equiv_constant (regno), loc,
5987 context_reg_class,
5988 GET_MODE (x), opnum, type, ind_levels);
5989 return 1;
5992 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5993 that feeds this insn. */
5994 if (reg_equiv_mem (regno) != 0)
5996 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5997 context_reg_class,
5998 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5999 return 1;
6001 #endif
6003 if (reg_equiv_memory_loc (regno)
6004 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
6006 rtx tem = make_memloc (x, regno);
6007 if (reg_equiv_address (regno) != 0
6008 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6010 x = tem;
6011 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
6012 &XEXP (x, 0), opnum, ADDR_TYPE (type),
6013 ind_levels, insn);
6014 if (!rtx_equal_p (x, tem))
6015 push_reg_equiv_alt_mem (regno, x);
6019 if (reg_renumber[regno] >= 0)
6020 regno = reg_renumber[regno];
6022 if (regno >= FIRST_PSEUDO_REGISTER
6023 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6024 index_code))
6026 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6027 context_reg_class,
6028 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6029 return 1;
6032 /* If a register appearing in an address is the subject of a CLOBBER
6033 in this insn, reload it into some other register to be safe.
6034 The CLOBBER is supposed to make the register unavailable
6035 from before this insn to after it. */
6036 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6038 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6039 context_reg_class,
6040 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6041 return 1;
6044 return 0;
6046 case SUBREG:
6047 if (REG_P (SUBREG_REG (x)))
6049 /* If this is a SUBREG of a hard register and the resulting register
6050 is of the wrong class, reload the whole SUBREG. This avoids
6051 needless copies if SUBREG_REG is multi-word. */
6052 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6054 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6056 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6057 index_code))
6059 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6060 context_reg_class,
6061 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6062 return 1;
6065 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6066 is larger than the class size, then reload the whole SUBREG. */
6067 else
6069 enum reg_class rclass = context_reg_class;
6070 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6071 > reg_class_size[(int) rclass])
6073 x = find_reloads_subreg_address (x, 0, opnum,
6074 ADDR_TYPE (type),
6075 ind_levels, insn, NULL);
6076 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6077 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6078 return 1;
6082 break;
6084 default:
6085 break;
6089 const char *fmt = GET_RTX_FORMAT (code);
6090 int i;
6092 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6094 if (fmt[i] == 'e')
6095 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6096 we get here. */
6097 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6098 code, SCRATCH, &XEXP (x, i),
6099 opnum, type, ind_levels, insn);
6103 #undef REG_OK_FOR_CONTEXT
6104 return 0;
6107 /* X, which is found at *LOC, is a part of an address that needs to be
6108 reloaded into a register of class RCLASS. If X is a constant, or if
6109 X is a PLUS that contains a constant, check that the constant is a
6110 legitimate operand and that we are supposed to be able to load
6111 it into the register.
6113 If not, force the constant into memory and reload the MEM instead.
6115 MODE is the mode to use, in case X is an integer constant.
6117 OPNUM and TYPE describe the purpose of any reloads made.
6119 IND_LEVELS says how many levels of indirect addressing this machine
6120 supports. */
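/* Hypothetical example (the constant is invented): on a target where
   (const_int 0x12345678) is not a legitimate constant for the given
   class, the code below forces it into the constant pool and pushes a
   reload of the resulting (mem (symbol_ref ...)) instead.  */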
6122 static void
6123 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6124 enum machine_mode mode, int opnum,
6125 enum reload_type type, int ind_levels)
6127 if (CONSTANT_P (x)
6128 && (!targetm.legitimate_constant_p (mode, x)
6129 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6131 x = force_const_mem (mode, x);
6132 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6133 opnum, type, ind_levels, 0);
6136 else if (GET_CODE (x) == PLUS
6137 && CONSTANT_P (XEXP (x, 1))
6138 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6139 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6140 == NO_REGS))
6142 rtx tem;
6144 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6145 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6146 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6147 opnum, type, ind_levels, 0);
6150 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6151 mode, VOIDmode, 0, 0, opnum, type);
6154 /* X, a subreg of a pseudo, is a part of an address that needs to be
6155 reloaded.
6157 If the pseudo is equivalent to a memory location that cannot be directly
6158 addressed, make the necessary address reloads.
6160 If address reloads have been necessary, or if the address is changed
6161 by register elimination, return the rtx of the memory location;
6162 otherwise, return X.
6164 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6165 memory location.
6167 OPNUM and TYPE identify the purpose of the reload.
6169 IND_LEVELS says how many levels of indirect addressing are
6170 supported at this point in the address.
6172 INSN, if nonzero, is the insn in which we do the reload. It is used
6173 to determine where to put USEs for pseudos that we have to replace with
6174 stack slots. */
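/* Illustrative example (pseudo number and offsets invented, little-endian
   layout assumed): given (subreg:HI (reg:SI 66) 2) where pseudo 66 lives
   in a stack slot, the code below rewrites the subreg as a narrower MEM
   such as (mem:HI (plus (reg fp) (const_int -14))), reloading the new
   address if it is not directly valid.  */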
6176 static rtx
6177 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6178 enum reload_type type, int ind_levels, rtx insn,
6179 int *address_reloaded)
6181 int regno = REGNO (SUBREG_REG (x));
6182 int reloaded = 0;
6184 if (reg_equiv_memory_loc (regno))
6186 /* If the address is not directly addressable, or if the address is not
6187 offsettable, then it must be replaced. */
6188 if (! force_replace
6189 && (reg_equiv_address (regno)
6190 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6191 force_replace = 1;
6193 if (force_replace || num_not_at_initial_offset)
6195 rtx tem = make_memloc (SUBREG_REG (x), regno);
6197 /* If the address changes because of register elimination, then
6198 it must be replaced. */
6199 if (force_replace
6200 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6202 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6203 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6204 int offset;
6205 rtx orig = tem;
6207 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6208 hold the correct (negative) byte offset. */
6209 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6210 offset = inner_size - outer_size;
6211 else
6212 offset = SUBREG_BYTE (x);
6214 XEXP (tem, 0) = plus_constant (GET_MODE (XEXP (tem, 0)),
6215 XEXP (tem, 0), offset);
6216 PUT_MODE (tem, GET_MODE (x));
6217 if (MEM_OFFSET_KNOWN_P (tem))
6218 set_mem_offset (tem, MEM_OFFSET (tem) + offset);
6219 if (MEM_SIZE_KNOWN_P (tem)
6220 && MEM_SIZE (tem) != (HOST_WIDE_INT) outer_size)
6221 set_mem_size (tem, outer_size);
6223 /* If this was a paradoxical subreg that we replaced, the
6224 resulting memory must be sufficiently aligned to allow
6225 us to widen the mode of the memory. */
6226 if (outer_size > inner_size)
6228 rtx base;
6230 base = XEXP (tem, 0);
6231 if (GET_CODE (base) == PLUS)
6233 if (CONST_INT_P (XEXP (base, 1))
6234 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6235 return x;
6236 base = XEXP (base, 0);
6238 if (!REG_P (base)
6239 || (REGNO_POINTER_ALIGN (REGNO (base))
6240 < outer_size * BITS_PER_UNIT))
6241 return x;
6244 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6245 XEXP (tem, 0), &XEXP (tem, 0),
6246 opnum, type, ind_levels, insn);
6247 /* ??? Do we need to handle nonzero offsets somehow? */
6248 if (!offset && !rtx_equal_p (tem, orig))
6249 push_reg_equiv_alt_mem (regno, tem);
6251 /* For some processors an address may be valid in the
6252 original mode but not in a smaller mode. For
6253 example, ARM accepts a scaled index register in
6254 SImode but not in HImode. Note that this is only
6255 a problem if the address in reg_equiv_mem is already
6256 invalid in the new mode; other cases would be fixed
6257 by find_reloads_address as usual.
6259 ??? We attempt to handle such cases here by doing an
6260 additional reload of the full address after the
6261 usual processing by find_reloads_address. Note that
6262 this may not work in the general case, but it seems
6263 to cover the cases where this situation currently
6264 occurs. A more general fix might be to reload the
6265 *value* instead of the address, but this would not
6266 be expected by the callers of this routine as-is.
6268 If find_reloads_address has already completely replaced
6269 the address, there is nothing further to do. */
6270 if (reloaded == 0
6271 && reg_equiv_mem (regno) != 0
6272 && !strict_memory_address_addr_space_p
6273 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6274 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6276 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6277 base_reg_class (GET_MODE (tem),
6278 MEM_ADDR_SPACE (tem),
6279 MEM, SCRATCH),
6280 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6281 opnum, type);
6282 reloaded = 1;
6284 /* If this is not a toplevel operand, find_reloads doesn't see
6285 this substitution. We have to emit a USE of the pseudo so
6286 that delete_output_reload can see it. */
6287 if (replace_reloads && recog_data.operand[opnum] != x)
6288 /* We mark the USE with QImode so that we recognize it
6289 as one that can be safely deleted at the end of
6290 reload. */
6291 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6292 SUBREG_REG (x)),
6293 insn), QImode);
6294 x = tem;
6298 if (address_reloaded)
6299 *address_reloaded = reloaded;
6301 return x;
6304 /* Substitute into the current INSN the registers into which we have reloaded
6305 the things that need reloading. The array `replacements'
6306 contains the locations of all pointers that must be changed
6307 and says what to replace them with.
6309 The replacements are applied in place; this function returns nothing. */
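/* Sketch of the mechanism, added for exposition; the location shown is
   hypothetical.  push_replacement records a pointer to the spot to be
   patched together with the reload number, conceptually

     r->where = &XEXP (mem, 0);
     r->what  = the reload number;
     r->mode  = the mode the location used to have;

   and once reload1.c has assigned rld[r->what].reg_rtx, the loop below
   stores that register, adjusted to r->mode if necessary, through each
   recorded location.  */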
6311 void
6312 subst_reloads (rtx insn)
6314 int i;
6316 for (i = 0; i < n_replacements; i++)
6318 struct replacement *r = &replacements[i];
6319 rtx reloadreg = rld[r->what].reg_rtx;
6320 if (reloadreg)
6322 #ifdef DEBUG_RELOAD
6323 /* This checking takes a very long time on some platforms
6324 causing the gcc.c-torture/compile/limits-fnargs.c test
6325 to time out during testing. See PR 31850.
6327 Internal consistency test. Check that we don't modify
6328 anything in the equivalence arrays. Whenever something from
6329 those arrays needs to be reloaded, it must be unshared before
6330 being substituted into; the equivalence must not be modified.
6331 Otherwise, if the equivalence is used after that, it will
6332 have been modified, and the thing substituted (probably a
6333 register) is likely overwritten and not a usable equivalence. */
6334 int check_regno;
6336 for (check_regno = 0; check_regno < max_regno; check_regno++)
6338 #define CHECK_MODF(ARRAY) \
6339 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6340 || !loc_mentioned_in_p (r->where, \
6341 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6343 CHECK_MODF (equiv_constant);
6344 CHECK_MODF (equiv_memory_loc);
6345 CHECK_MODF (equiv_address);
6346 CHECK_MODF (equiv_mem);
6347 #undef CHECK_MODF
6349 #endif /* DEBUG_RELOAD */
6351 /* If we're replacing a LABEL_REF with a register, there must
6352 already be an indication (to e.g. flow) which label this
6353 register refers to. */
6354 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6355 || !JUMP_P (insn)
6356 || find_reg_note (insn,
6357 REG_LABEL_OPERAND,
6358 XEXP (*r->where, 0))
6359 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6361 /* Encapsulate RELOADREG so its machine mode matches what
6362 used to be there. Note that gen_lowpart_common will
6363 do the wrong thing if RELOADREG is multi-word. RELOADREG
6364 will always be a REG here. */
6365 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6366 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6368 *r->where = reloadreg;
6370 /* If reload got no reg and isn't optional, something's wrong. */
6371 else
6372 gcc_assert (rld[r->what].optional);
6376 /* Make a copy of any replacements being done into X and move those
6377 copies to locations in Y, a copy of X. */
6379 void
6380 copy_replacements (rtx x, rtx y)
6382 copy_replacements_1 (&x, &y, n_replacements);
6385 static void
6386 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6388 int i, j;
6389 rtx x, y;
6390 struct replacement *r;
6391 enum rtx_code code;
6392 const char *fmt;
6394 for (j = 0; j < orig_replacements; j++)
6395 if (replacements[j].where == px)
6397 r = &replacements[n_replacements++];
6398 r->where = py;
6399 r->what = replacements[j].what;
6400 r->mode = replacements[j].mode;
6403 x = *px;
6404 y = *py;
6405 code = GET_CODE (x);
6406 fmt = GET_RTX_FORMAT (code);
6408 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6410 if (fmt[i] == 'e')
6411 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6412 else if (fmt[i] == 'E')
6413 for (j = XVECLEN (x, i); --j >= 0; )
6414 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6415 orig_replacements);
6419 /* Change any replacements being done to *X to be done to *Y. */
6421 void
6422 move_replacements (rtx *x, rtx *y)
6424 int i;
6426 for (i = 0; i < n_replacements; i++)
6427 if (replacements[i].where == x)
6428 replacements[i].where = y;
6431 /* If LOC was scheduled to be replaced by something, return the replacement.
6432 Otherwise, return *LOC. */
6435 rtx find_replacement (rtx *loc)
6437 struct replacement *r;
6439 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6441 rtx reloadreg = rld[r->what].reg_rtx;
6443 if (reloadreg && r->where == loc)
6445 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6446 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6448 return reloadreg;
6450 else if (reloadreg && GET_CODE (*loc) == SUBREG
6451 && r->where == &SUBREG_REG (*loc))
6453 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6454 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6456 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6457 GET_MODE (SUBREG_REG (*loc)),
6458 SUBREG_BYTE (*loc));
6462 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6463 what's inside and make a new rtl if so. */
6464 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6465 || GET_CODE (*loc) == MULT)
6467 rtx x = find_replacement (&XEXP (*loc, 0));
6468 rtx y = find_replacement (&XEXP (*loc, 1));
6470 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6471 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6474 return *loc;
6477 /* Return nonzero if register in range [REGNO, ENDREGNO)
6478 appears either explicitly or implicitly in X
6479 other than being stored into (except for earlyclobber operands).
6481 References contained within the substructure at LOC do not count.
6482 LOC may be zero, meaning don't ignore anything.
6484 This is similar to refers_to_regno_p in rtlanal.c except that we
6485 look at equivalences for pseudos that didn't get hard registers. */
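/* A hypothetical example (register numbers invented): if pseudo 80 got no
   hard register and reg_equiv_memory_loc (80) is
   (mem (plus (reg 6) (const_int -4))), then

     refers_to_regno_for_reload_p (6, 7, gen_rtx_REG (SImode, 80), (rtx *) 0)

   returns nonzero, because the equivalence mentions hard register 6 even
   though the pseudo itself does not.  */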
6487 static int
6488 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6489 rtx x, rtx *loc)
6491 int i;
6492 unsigned int r;
6493 RTX_CODE code;
6494 const char *fmt;
6496 if (x == 0)
6497 return 0;
6499 repeat:
6500 code = GET_CODE (x);
6502 switch (code)
6504 case REG:
6505 r = REGNO (x);
6507 /* If this is a pseudo, a hard register must not have been allocated.
6508 X must therefore either be a constant or be in memory. */
6509 if (r >= FIRST_PSEUDO_REGISTER)
6511 if (reg_equiv_memory_loc (r))
6512 return refers_to_regno_for_reload_p (regno, endregno,
6513 reg_equiv_memory_loc (r),
6514 (rtx*) 0);
6516 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6517 return 0;
6520 return (endregno > r
6521 && regno < r + (r < FIRST_PSEUDO_REGISTER
6522 ? hard_regno_nregs[r][GET_MODE (x)]
6523 : 1));
6525 case SUBREG:
6526 /* If this is a SUBREG of a hard reg, we can see exactly which
6527 registers are being modified. Otherwise, handle normally. */
6528 if (REG_P (SUBREG_REG (x))
6529 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6531 unsigned int inner_regno = subreg_regno (x);
6532 unsigned int inner_endregno
6533 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6534 ? subreg_nregs (x) : 1);
6536 return endregno > inner_regno && regno < inner_endregno;
6538 break;
6540 case CLOBBER:
6541 case SET:
6542 if (&SET_DEST (x) != loc
6543 /* Note setting a SUBREG counts as referring to the REG it is in for
6544 a pseudo but not for hard registers since we can
6545 treat each word individually. */
6546 && ((GET_CODE (SET_DEST (x)) == SUBREG
6547 && loc != &SUBREG_REG (SET_DEST (x))
6548 && REG_P (SUBREG_REG (SET_DEST (x)))
6549 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6550 && refers_to_regno_for_reload_p (regno, endregno,
6551 SUBREG_REG (SET_DEST (x)),
6552 loc))
6553 /* If the output is an earlyclobber operand, this is
6554 a conflict. */
6555 || ((!REG_P (SET_DEST (x))
6556 || earlyclobber_operand_p (SET_DEST (x)))
6557 && refers_to_regno_for_reload_p (regno, endregno,
6558 SET_DEST (x), loc))))
6559 return 1;
6561 if (code == CLOBBER || loc == &SET_SRC (x))
6562 return 0;
6563 x = SET_SRC (x);
6564 goto repeat;
6566 default:
6567 break;
6570 /* X does not match, so try its subexpressions. */
6572 fmt = GET_RTX_FORMAT (code);
6573 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6575 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6577 if (i == 0)
6579 x = XEXP (x, 0);
6580 goto repeat;
6582 else
6583 if (refers_to_regno_for_reload_p (regno, endregno,
6584 XEXP (x, i), loc))
6585 return 1;
6587 else if (fmt[i] == 'E')
6589 int j;
6590 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6591 if (loc != &XVECEXP (x, i, j)
6592 && refers_to_regno_for_reload_p (regno, endregno,
6593 XVECEXP (x, i, j), loc))
6594 return 1;
6597 return 0;
6600 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6601 we check if any register number in X conflicts with the relevant register
6602 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6603 contains a MEM (we don't bother checking for memory addresses that can't
6604 conflict because we expect this to be a rare case).
6606 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6607 that we look at equivalences for pseudos that didn't get hard registers. */
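/* Hypothetical examples (register numbers invented): with X == (reg 7), a
   hard register, and IN == (mem (plus (reg 7) (const_int 8))), this
   returns 1.  With X a pseudo that is equivalenced to a stack slot and IN
   any expression containing a MEM, it also returns 1, by way of
   refers_to_mem_for_reload_p.  */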
6610 int reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6612 int regno, endregno;
6614 /* Overly conservative. */
6615 if (GET_CODE (x) == STRICT_LOW_PART
6616 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6617 x = XEXP (x, 0);
6619 /* If either argument is a constant, then modifying X cannot affect IN. */
6620 if (CONSTANT_P (x) || CONSTANT_P (in))
6621 return 0;
6622 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6623 return refers_to_mem_for_reload_p (in);
6624 else if (GET_CODE (x) == SUBREG)
6626 regno = REGNO (SUBREG_REG (x));
6627 if (regno < FIRST_PSEUDO_REGISTER)
6628 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6629 GET_MODE (SUBREG_REG (x)),
6630 SUBREG_BYTE (x),
6631 GET_MODE (x));
6632 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6633 ? subreg_nregs (x) : 1);
6635 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6637 else if (REG_P (x))
6639 regno = REGNO (x);
6641 /* If this is a pseudo, it must not have been assigned a hard register.
6642 Therefore, it must either be in memory or be a constant. */
6644 if (regno >= FIRST_PSEUDO_REGISTER)
6646 if (reg_equiv_memory_loc (regno))
6647 return refers_to_mem_for_reload_p (in);
6648 gcc_assert (reg_equiv_constant (regno));
6649 return 0;
6652 endregno = END_HARD_REGNO (x);
6654 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6656 else if (MEM_P (x))
6657 return refers_to_mem_for_reload_p (in);
6658 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6659 || GET_CODE (x) == CC0)
6660 return reg_mentioned_p (x, in);
6661 else
6663 gcc_assert (GET_CODE (x) == PLUS);
6665 /* We actually want to know if X is mentioned somewhere inside IN.
6666 We must not say that (plus (sp) (const_int 124)) is in
6667 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6668 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6669 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6670 while (MEM_P (in))
6671 in = XEXP (in, 0);
6672 if (REG_P (in))
6673 return 0;
6674 else if (GET_CODE (in) == PLUS)
6675 return (rtx_equal_p (x, in)
6676 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6677 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6678 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6679 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6682 gcc_unreachable ();
6685 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6686 registers. */
6688 static int
6689 refers_to_mem_for_reload_p (rtx x)
6691 const char *fmt;
6692 int i;
6694 if (MEM_P (x))
6695 return 1;
6697 if (REG_P (x))
6698 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6699 && reg_equiv_memory_loc (REGNO (x)));
6701 fmt = GET_RTX_FORMAT (GET_CODE (x));
6702 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6703 if (fmt[i] == 'e'
6704 && (MEM_P (XEXP (x, i))
6705 || refers_to_mem_for_reload_p (XEXP (x, i))))
6706 return 1;
6708 return 0;
6711 /* Check the insns before INSN to see if there is a suitable register
6712 containing the same value as GOAL.
6713 If OTHER is -1, look for a register in class RCLASS.
6714 Otherwise, just see if register number OTHER shares GOAL's value.
6716 Return an rtx for the register found, or zero if none is found.
6718 If RELOAD_REG_P is (short *)1,
6719 we reject any hard reg that appears in reload_reg_rtx
6720 because such a hard reg is also needed coming into this insn.
6722 If RELOAD_REG_P is any other nonzero value,
6723 it is a vector indexed by hard reg number
6724 and we reject any hard reg whose element in the vector is nonnegative
6725 as well as any that appears in reload_reg_rtx.
6727 If GOAL is zero, then GOALREG is a register number; we look
6728 for an equivalent for that register.
6730 MODE is the machine mode of the value we want an equivalence for.
6731 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6733 This function is used by jump.c as well as in the reload pass.
6735 If GOAL is the sum of the stack pointer and a constant, we treat it
6736 as if it were a constant except that sp is required to be unchanging. */
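/* A typical call (with purely illustrative arguments) is

	find_equiv_reg (goal, insn, GENERAL_REGS, -1, (short *) 1, 0, SImode)

   which scans backwards from INSN for a GENERAL_REGS register that already
   holds the SImode value of GOAL, rejecting any hard register that appears
   in reload_reg_rtx.  */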
6738 rtx
6739 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6740 short *reload_reg_p, int goalreg, enum machine_mode mode)
6741 {
6742 rtx p = insn;
6743 rtx goaltry, valtry, value, where;
6744 rtx pat;
6745 int regno = -1;
6746 int valueno;
6747 int goal_mem = 0;
6748 int goal_const = 0;
6749 int goal_mem_addr_varies = 0;
6750 int need_stable_sp = 0;
6751 int nregs;
6752 int valuenregs;
6753 int num = 0;
6755 if (goal == 0)
6756 regno = goalreg;
6757 else if (REG_P (goal))
6758 regno = REGNO (goal);
6759 else if (MEM_P (goal))
6761 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6762 if (MEM_VOLATILE_P (goal))
6763 return 0;
6764 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6765 return 0;
6766 /* An address with side effects must be reexecuted. */
6767 switch (code)
6769 case POST_INC:
6770 case PRE_INC:
6771 case POST_DEC:
6772 case PRE_DEC:
6773 case POST_MODIFY:
6774 case PRE_MODIFY:
6775 return 0;
6776 default:
6777 break;
6779 goal_mem = 1;
6781 else if (CONSTANT_P (goal))
6782 goal_const = 1;
6783 else if (GET_CODE (goal) == PLUS
6784 && XEXP (goal, 0) == stack_pointer_rtx
6785 && CONSTANT_P (XEXP (goal, 1)))
6786 goal_const = need_stable_sp = 1;
6787 else if (GET_CODE (goal) == PLUS
6788 && XEXP (goal, 0) == frame_pointer_rtx
6789 && CONSTANT_P (XEXP (goal, 1)))
6790 goal_const = 1;
6791 else
6792 return 0;
6794 num = 0;
6795 /* Scan insns back from INSN, looking for one that copies
6796 a value into or out of GOAL.
6797 Stop and give up if we reach a label. */
6799 while (1)
6801 p = PREV_INSN (p);
6802 if (p && DEBUG_INSN_P (p))
6803 continue;
6804 num++;
6805 if (p == 0 || LABEL_P (p)
6806 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6807 return 0;
6809 /* Don't reuse register contents from before a setjmp-type
6810 function call; on the second return (from the longjmp) it
6811 might have been clobbered by a later reuse. It doesn't
6812 seem worthwhile to check whether it is actually reused here,
6813 even if that information were readily available;
6814 just don't reuse it across the setjmp call. */
6815 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6816 return 0;
6818 if (NONJUMP_INSN_P (p)
6819 /* If we don't want spill regs ... */
6820 && (! (reload_reg_p != 0
6821 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6822 /* ... then ignore insns introduced by reload; they aren't
6823 useful and can cause results in reload_as_needed to be
6824 different from what they were when calculating the need for
6825 spills. If we notice an input-reload insn here, we will
6826 reject it below, but it might hide a usable equivalent.
6827 That makes bad code. It may even fail: perhaps no reg was
6828 spilled for this insn because it was assumed we would find
6829 that equivalent. */
6830 || INSN_UID (p) < reload_first_uid))
6832 rtx tem;
6833 pat = single_set (p);
6835 /* First check for something that sets some reg equal to GOAL. */
6836 if (pat != 0
6837 && ((regno >= 0
6838 && true_regnum (SET_SRC (pat)) == regno
6839 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6840 ||
6841 (regno >= 0
6842 && true_regnum (SET_DEST (pat)) == regno
6843 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6844 ||
6845 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6846 /* When looking for stack pointer + const,
6847 make sure we don't use a stack adjust. */
6848 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6849 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6850 || (goal_mem
6851 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6852 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6853 || (goal_mem
6854 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6855 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6856 /* If we are looking for a constant,
6857 and something equivalent to that constant was copied
6858 into a reg, we can use that reg. */
6859 || (goal_const && REG_NOTES (p) != 0
6860 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6861 && ((rtx_equal_p (XEXP (tem, 0), goal)
6862 && (valueno
6863 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6864 || (REG_P (SET_DEST (pat))
6865 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6866 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6867 && CONST_INT_P (goal)
6868 && 0 != (goaltry
6869 = operand_subword (XEXP (tem, 0), 0, 0,
6870 VOIDmode))
6871 && rtx_equal_p (goal, goaltry)
6872 && (valtry
6873 = operand_subword (SET_DEST (pat), 0, 0,
6874 VOIDmode))
6875 && (valueno = true_regnum (valtry)) >= 0)))
6876 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6877 NULL_RTX))
6878 && REG_P (SET_DEST (pat))
6879 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6880 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6881 && CONST_INT_P (goal)
6882 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6883 VOIDmode))
6884 && rtx_equal_p (goal, goaltry)
6885 && (valtry
6886 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6887 && (valueno = true_regnum (valtry)) >= 0)))
6889 if (other >= 0)
6891 if (valueno != other)
6892 continue;
6894 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6895 continue;
6896 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6897 mode, valueno))
6898 continue;
6899 value = valtry;
6900 where = p;
6901 break;
6906 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6907 (or copying VALUE into GOAL, if GOAL is also a register).
6908 Now verify that VALUE is really valid. */
6910 /* VALUENO is the register number of VALUE; a hard register. */
6912 /* Don't try to re-use something that is killed in this insn. We want
6913 to be able to trust REG_UNUSED notes. */
6914 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6915 return 0;
6917 /* If we propose to get the value from the stack pointer or if GOAL is
6918 a MEM based on the stack pointer, we need a stable SP. */
6919 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6920 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6921 goal)))
6922 need_stable_sp = 1;
6924 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6925 if (GET_MODE (value) != mode)
6926 return 0;
6928 /* Reject VALUE if it was loaded from GOAL
6929 and is also a register that appears in the address of GOAL. */
6931 if (goal_mem && value == SET_DEST (single_set (where))
6932 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6933 goal, (rtx*) 0))
6934 return 0;
6936 /* Reject registers that overlap GOAL. */
6938 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6939 nregs = hard_regno_nregs[regno][mode];
6940 else
6941 nregs = 1;
6942 valuenregs = hard_regno_nregs[valueno][mode];
6944 if (!goal_mem && !goal_const
6945 && regno + nregs > valueno && regno < valueno + valuenregs)
6946 return 0;
6948 /* Reject VALUE if it is one of the regs reserved for reloads.
6949 Reload1 knows how to reuse them anyway, and it would get
6950 confused if we allocated one without its knowledge.
6951 (Now that insns introduced by reload are ignored above,
6952 this case shouldn't happen, but I'm not positive.) */
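/* When RELOAD_REG_P is a real vector rather than the (short *) 1 marker,
   a nonnegative element for a hard register marks it as reserved for a
   reload, so a VALUE occupying any such register is rejected here.  */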
6954 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6956 int i;
6957 for (i = 0; i < valuenregs; ++i)
6958 if (reload_reg_p[valueno + i] >= 0)
6959 return 0;
6962 /* Reject VALUE if it is a register being used for an input reload
6963 even if it is not one of those reserved. */
6965 if (reload_reg_p != 0)
6967 int i;
6968 for (i = 0; i < n_reloads; i++)
6969 if (rld[i].reg_rtx != 0 && rld[i].in)
6971 int regno1 = REGNO (rld[i].reg_rtx);
6972 int nregs1 = hard_regno_nregs[regno1]
6973 [GET_MODE (rld[i].reg_rtx)];
6974 if (regno1 < valueno + valuenregs
6975 && regno1 + nregs1 > valueno)
6976 return 0;
6980 if (goal_mem)
6981 /* We must treat the frame pointer as varying here,
6982 since it can vary, e.g. in a nonlocal goto as generated by expand_goto. */
6983 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6985 /* Now verify that the values of GOAL and VALUE remain unaltered
6986 until INSN is reached. */
6988 p = insn;
6989 while (1)
6991 p = PREV_INSN (p);
6992 if (p == where)
6993 return value;
6995 /* Don't trust the equivalence past a function call
6996 if either GOAL or VALUE is in a call-clobbered register, or if GOAL is a memory reference. */
6997 if (CALL_P (p))
6999 int i;
7001 if (goal_mem || need_stable_sp)
7002 return 0;
7004 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
7005 for (i = 0; i < nregs; ++i)
7006 if (call_used_regs[regno + i]
7007 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
7008 return 0;
7010 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
7011 for (i = 0; i < valuenregs; ++i)
7012 if (call_used_regs[valueno + i]
7013 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
7014 return 0;
7017 if (INSN_P (p))
7019 pat = PATTERN (p);
7021 /* Watch out for unspec_volatile, and volatile asms. */
7022 if (volatile_insn_p (pat))
7023 return 0;
7025 /* If this insn P stores in either GOAL or VALUE, return 0.
7026 If GOAL is a memory ref and this insn writes memory, return 0.
7027 If GOAL is a memory ref and its address is not constant,
7028 and this insn P changes a register used in GOAL, return 0. */
7030 if (GET_CODE (pat) == COND_EXEC)
7031 pat = COND_EXEC_CODE (pat);
7032 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7034 rtx dest = SET_DEST (pat);
7035 while (GET_CODE (dest) == SUBREG
7036 || GET_CODE (dest) == ZERO_EXTRACT
7037 || GET_CODE (dest) == STRICT_LOW_PART)
7038 dest = XEXP (dest, 0);
7039 if (REG_P (dest))
7041 int xregno = REGNO (dest);
7042 int xnregs;
7043 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7044 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7045 else
7046 xnregs = 1;
7047 if (xregno < regno + nregs && xregno + xnregs > regno)
7048 return 0;
7049 if (xregno < valueno + valuenregs
7050 && xregno + xnregs > valueno)
7051 return 0;
7052 if (goal_mem_addr_varies
7053 && reg_overlap_mentioned_for_reload_p (dest, goal))
7054 return 0;
7055 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7056 return 0;
7058 else if (goal_mem && MEM_P (dest)
7059 && ! push_operand (dest, GET_MODE (dest)))
7060 return 0;
7061 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7062 && reg_equiv_memory_loc (regno) != 0)
7063 return 0;
7064 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7065 return 0;
7067 else if (GET_CODE (pat) == PARALLEL)
7069 int i;
7070 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7072 rtx v1 = XVECEXP (pat, 0, i);
7073 if (GET_CODE (v1) == COND_EXEC)
7074 v1 = COND_EXEC_CODE (v1);
7075 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7077 rtx dest = SET_DEST (v1);
7078 while (GET_CODE (dest) == SUBREG
7079 || GET_CODE (dest) == ZERO_EXTRACT
7080 || GET_CODE (dest) == STRICT_LOW_PART)
7081 dest = XEXP (dest, 0);
7082 if (REG_P (dest))
7084 int xregno = REGNO (dest);
7085 int xnregs;
7086 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7087 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7088 else
7089 xnregs = 1;
7090 if (xregno < regno + nregs
7091 && xregno + xnregs > regno)
7092 return 0;
7093 if (xregno < valueno + valuenregs
7094 && xregno + xnregs > valueno)
7095 return 0;
7096 if (goal_mem_addr_varies
7097 && reg_overlap_mentioned_for_reload_p (dest,
7098 goal))
7099 return 0;
7100 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7101 return 0;
7103 else if (goal_mem && MEM_P (dest)
7104 && ! push_operand (dest, GET_MODE (dest)))
7105 return 0;
7106 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7107 && reg_equiv_memory_loc (regno) != 0)
7108 return 0;
7109 else if (need_stable_sp
7110 && push_operand (dest, GET_MODE (dest)))
7111 return 0;
7116 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7118 rtx link;
7120 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7121 link = XEXP (link, 1))
7123 pat = XEXP (link, 0);
7124 if (GET_CODE (pat) == CLOBBER)
7126 rtx dest = SET_DEST (pat);
7128 if (REG_P (dest))
7130 int xregno = REGNO (dest);
7131 int xnregs
7132 = hard_regno_nregs[xregno][GET_MODE (dest)];
7134 if (xregno < regno + nregs
7135 && xregno + xnregs > regno)
7136 return 0;
7137 else if (xregno < valueno + valuenregs
7138 && xregno + xnregs > valueno)
7139 return 0;
7140 else if (goal_mem_addr_varies
7141 && reg_overlap_mentioned_for_reload_p (dest,
7142 goal))
7143 return 0;
7146 else if (goal_mem && MEM_P (dest)
7147 && ! push_operand (dest, GET_MODE (dest)))
7148 return 0;
7149 else if (need_stable_sp
7150 && push_operand (dest, GET_MODE (dest)))
7151 return 0;
7156 #ifdef AUTO_INC_DEC
7157 /* If this insn auto-increments or auto-decrements
7158 either regno or valueno, return 0 now.
7159 If GOAL is a memory ref and its address is not constant,
7160 and this insn P increments a register used in GOAL, return 0. */
7162 rtx link;
7164 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7165 if (REG_NOTE_KIND (link) == REG_INC
7166 && REG_P (XEXP (link, 0)))
7168 int incno = REGNO (XEXP (link, 0));
7169 if (incno < regno + nregs && incno >= regno)
7170 return 0;
7171 if (incno < valueno + valuenregs && incno >= valueno)
7172 return 0;
7173 if (goal_mem_addr_varies
7174 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7175 goal))
7176 return 0;
7179 #endif
7184 /* Find a place where INCED appears in an increment or decrement operator
7185 within X, and return the amount INCED is incremented or decremented by.
7186 The value is always positive. */
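/* For example, if X contains (mem:SI (post_inc:SI (reg:SI 5))) and INCED is
   (reg:SI 5), the amount is GET_MODE_SIZE (SImode), typically 4.  For
   PRE_MODIFY and POST_MODIFY addresses the constant from the embedded PLUS
   is used, with its sign dropped.  */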
7188 static int
7189 find_inc_amount (rtx x, rtx inced)
7190 {
7191 enum rtx_code code = GET_CODE (x);
7192 const char *fmt;
7193 int i;
7195 if (code == MEM)
7197 rtx addr = XEXP (x, 0);
7198 if ((GET_CODE (addr) == PRE_DEC
7199 || GET_CODE (addr) == POST_DEC
7200 || GET_CODE (addr) == PRE_INC
7201 || GET_CODE (addr) == POST_INC)
7202 && XEXP (addr, 0) == inced)
7203 return GET_MODE_SIZE (GET_MODE (x));
7204 else if ((GET_CODE (addr) == PRE_MODIFY
7205 || GET_CODE (addr) == POST_MODIFY)
7206 && GET_CODE (XEXP (addr, 1)) == PLUS
7207 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7208 && XEXP (addr, 0) == inced
7209 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7211 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7212 return i < 0 ? -i : i;
7216 fmt = GET_RTX_FORMAT (code);
7217 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7219 if (fmt[i] == 'e')
7221 int tem = find_inc_amount (XEXP (x, i), inced);
7222 if (tem != 0)
7223 return tem;
7225 if (fmt[i] == 'E')
7227 int j;
7228 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7230 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7231 if (tem != 0)
7232 return tem;
7237 return 0;
7240 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7241 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
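/* For example, an insn carrying a REG_INC note for (reg:SI 4) makes this
   return 1 for any REGNO/ENDREGNO range that covers hard register 4.  */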
7243 #ifdef AUTO_INC_DEC
7244 static int
7245 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7246 rtx insn)
7247 {
7248 rtx link;
7250 gcc_assert (insn);
7252 if (! INSN_P (insn))
7253 return 0;
7255 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7256 if (REG_NOTE_KIND (link) == REG_INC)
7258 unsigned int test = (int) REGNO (XEXP (link, 0));
7259 if (test >= regno && test < endregno)
7260 return 1;
7262 return 0;
7264 #else
7266 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7268 #endif
7270 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7271 If SETS is 1, also consider SETs. If SETS is 2, also check for
7272 REG_INC notes. REGNO must refer to a hard register. */
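/* For example, with SETS == 0 an insn whose pattern is
   (clobber (reg:SI 1)) yields 1 for REGNO 1, whereas a plain
   (set (reg:SI 1) ...) is only considered when SETS is 1.  */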
7274 int
7275 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7276 int sets)
7277 {
7278 unsigned int nregs, endregno;
7280 /* regno must be a hard register. */
7281 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7283 nregs = hard_regno_nregs[regno][mode];
7284 endregno = regno + nregs;
7286 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7287 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7288 && REG_P (XEXP (PATTERN (insn), 0)))
7290 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7292 return test >= regno && test < endregno;
7295 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7296 return 1;
7298 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7300 int i = XVECLEN (PATTERN (insn), 0) - 1;
7302 for (; i >= 0; i--)
7304 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7305 if ((GET_CODE (elt) == CLOBBER
7306 || (sets == 1 && GET_CODE (elt) == SET))
7307 && REG_P (XEXP (elt, 0)))
7309 unsigned int test = REGNO (XEXP (elt, 0));
7311 if (test >= regno && test < endregno)
7312 return 1;
7314 if (sets == 2
7315 && reg_inc_found_and_valid_p (regno, endregno, elt))
7316 return 1;
7320 return 0;
7323 /* Return the low part, with mode MODE, of the hard register RELOADREG. */
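/* For example, on a hypothetical target where (reg:DI 10) occupies two
   word-sized hard registers, the SImode low part is (reg:SI 10), or
   (reg:SI 11) when REG_WORDS_BIG_ENDIAN, which is what the adjustment
   below computes.  */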
7324 rtx
7325 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7326 {
7327 int regno;
7329 if (GET_MODE (reloadreg) == mode)
7330 return reloadreg;
7332 regno = REGNO (reloadreg);
7334 if (REG_WORDS_BIG_ENDIAN)
7335 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7336 - (int) hard_regno_nregs[regno][mode];
7338 return gen_rtx_REG (mode, regno);
7341 static const char *const reload_when_needed_name[] =
7343 "RELOAD_FOR_INPUT",
7344 "RELOAD_FOR_OUTPUT",
7345 "RELOAD_FOR_INSN",
7346 "RELOAD_FOR_INPUT_ADDRESS",
7347 "RELOAD_FOR_INPADDR_ADDRESS",
7348 "RELOAD_FOR_OUTPUT_ADDRESS",
7349 "RELOAD_FOR_OUTADDR_ADDRESS",
7350 "RELOAD_FOR_OPERAND_ADDRESS",
7351 "RELOAD_FOR_OPADDR_ADDR",
7352 "RELOAD_OTHER",
7353 "RELOAD_FOR_OTHER_ADDRESS"
7356 /* These functions are used to print the variables set by `find_reloads'. */
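/* The entry for one reload might look roughly like

	Reload 0: reload_in (SI) = (reg:SI 123)
		GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)

   followed by whichever of the optional attributes below apply.  */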
7358 DEBUG_FUNCTION void
7359 debug_reload_to_stream (FILE *f)
7360 {
7361 int r;
7362 const char *prefix;
7364 if (! f)
7365 f = stderr;
7366 for (r = 0; r < n_reloads; r++)
7368 fprintf (f, "Reload %d: ", r);
7370 if (rld[r].in != 0)
7372 fprintf (f, "reload_in (%s) = ",
7373 GET_MODE_NAME (rld[r].inmode));
7374 print_inline_rtx (f, rld[r].in, 24);
7375 fprintf (f, "\n\t");
7378 if (rld[r].out != 0)
7380 fprintf (f, "reload_out (%s) = ",
7381 GET_MODE_NAME (rld[r].outmode));
7382 print_inline_rtx (f, rld[r].out, 24);
7383 fprintf (f, "\n\t");
7386 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7388 fprintf (f, "%s (opnum = %d)",
7389 reload_when_needed_name[(int) rld[r].when_needed],
7390 rld[r].opnum);
7392 if (rld[r].optional)
7393 fprintf (f, ", optional");
7395 if (rld[r].nongroup)
7396 fprintf (f, ", nongroup");
7398 if (rld[r].inc != 0)
7399 fprintf (f, ", inc by %d", rld[r].inc);
7401 if (rld[r].nocombine)
7402 fprintf (f, ", can't combine");
7404 if (rld[r].secondary_p)
7405 fprintf (f, ", secondary_reload_p");
7407 if (rld[r].in_reg != 0)
7409 fprintf (f, "\n\treload_in_reg: ");
7410 print_inline_rtx (f, rld[r].in_reg, 24);
7413 if (rld[r].out_reg != 0)
7415 fprintf (f, "\n\treload_out_reg: ");
7416 print_inline_rtx (f, rld[r].out_reg, 24);
7419 if (rld[r].reg_rtx != 0)
7421 fprintf (f, "\n\treload_reg_rtx: ");
7422 print_inline_rtx (f, rld[r].reg_rtx, 24);
7425 prefix = "\n\t";
7426 if (rld[r].secondary_in_reload != -1)
7428 fprintf (f, "%ssecondary_in_reload = %d",
7429 prefix, rld[r].secondary_in_reload);
7430 prefix = ", ";
7433 if (rld[r].secondary_out_reload != -1)
7434 fprintf (f, "%ssecondary_out_reload = %d\n",
7435 prefix, rld[r].secondary_out_reload);
7437 prefix = "\n\t";
7438 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7440 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7441 insn_data[rld[r].secondary_in_icode].name);
7442 prefix = ", ";
7445 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7446 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7447 insn_data[rld[r].secondary_out_icode].name);
7449 fprintf (f, "\n");
7453 DEBUG_FUNCTION void
7454 debug_reload (void)
7456 debug_reload_to_stream (stderr);