1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
58 NOTE SIDE EFFECTS:
60 find_reloads can alter the operands of the instruction it is called on.
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
74 Using a reload register for several reloads in one insn:
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
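/* A minimal sketch of the calling sequence described above, for
   orientation only; the real driver lives in reload1.c and the variable
   names here are hypothetical:

       init_reload ();                 -- once, before the first insn
       ...
       find_reloads (insn, replace, ind_levels, live_known, spill_reg_p);
       for (r = 0; r < n_reloads; r++) -- choose hard regs for the reloads
         if (rld[r].reg_rtx == 0)
           rld[r].reg_rtx = ...;
       ...                             -- emit loads before INSN, stores after
       subst_reloads (insn);           -- rewrite the recorded locations  */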
88 #define REG_OK_STRICT
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
115 /* True if X is a constant that can be forced into the constant pool.
116 MODE is the mode of the operand, or VOIDmode if not known. */
117 #define CONST_POOL_OK_P(MODE, X) \
118 ((MODE) != VOIDmode \
119 && CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (MODE, X))
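/* For example (an illustrative restatement): a SYMBOL_REF or CONST_DOUBLE
   operand whose mode is known satisfies CONST_POOL_OK_P unless the target
   vetoes it via targetm.cannot_force_const_mem; a HIGH, or an operand of
   unknown (VOIDmode) mode, never does.  */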
123 /* True if RCLASS is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
126 static inline bool
127 small_register_class_p (reg_class_t rclass)
129 return (reg_class_size [(int) rclass] == 1
130 || (reg_class_size [(int) rclass] >= 1
131 && targetm.class_likely_spilled_p (rclass)));
135 /* All reloads of the current insn are recorded here. See reload.h for
136 comments. */
137 int n_reloads;
138 struct reload rld[MAX_RELOADS];
140 /* All the "earlyclobber" operands of the current insn
141 are recorded here. */
142 int n_earlyclobbers;
143 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
145 int reload_n_operands;
147 /* Replacing reloads.
149 If `replace_reloads' is nonzero, then as each reload is recorded
150 an entry is made for it in the table `replacements'.
151 Then later `subst_reloads' can look through that table and
152 perform all the replacements needed. */
154 /* Nonzero means record the places to replace. */
155 static int replace_reloads;
157 /* Each replacement is recorded with a structure like this. */
158 struct replacement
160 rtx *where; /* Location to store in */
161 int what; /* which reload this is for */
162 enum machine_mode mode; /* mode it must have */
165 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
167 /* Number of replacements currently recorded. */
168 static int n_replacements;
170 /* Used to track what is modified by an operand. */
171 struct decomposition
173 int reg_flag; /* Nonzero if referencing a register. */
174 int safe; /* Nonzero if this can't conflict with anything. */
175 rtx base; /* Base address for MEM. */
176 HOST_WIDE_INT start; /* Starting offset or register number. */
177 HOST_WIDE_INT end; /* Ending offset or register number. */
180 #ifdef SECONDARY_MEMORY_NEEDED
182 /* Save MEMs needed to copy from one class of registers to another. One MEM
183 is used per mode, but normally only one or two modes are ever used.
185 We keep two versions, before and after register elimination. The one
186 after register elimination is recorded separately for each operand. This
187 is done in case the address is not valid, to be sure that we reload
188 each one separately. */
190 static rtx secondary_memlocs[NUM_MACHINE_MODES];
191 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
192 static int secondary_memlocs_elim_used = 0;
193 #endif
195 /* The instruction we are doing reloads for;
196 so we can test whether a register dies in it. */
197 static rtx this_insn;
199 /* Nonzero if this instruction is a user-specified asm with operands. */
200 static int this_insn_is_asm;
202 /* If hard_regs_live_known is nonzero,
203 we can tell which hard regs are currently live,
204 at least enough to succeed in choosing dummy reloads. */
205 static int hard_regs_live_known;
207 /* Indexed by hard reg number,
208 element is nonnegative if hard reg has been spilled.
209 This vector is passed to `find_reloads' as an argument
210 and is not changed here. */
211 static short *static_reload_reg_p;
213 /* Set to 1 in subst_reg_equivs if it changes anything. */
214 static int subst_reg_equivs_changed;
216 /* On return from push_reload, holds the reload-number for the OUT
217 operand, which can be different from the one for the input operand. */
218 static int output_reloadnum;
220 /* Compare two RTX's. */
221 #define MATCHES(x, y) \
222 (x == y || (x != 0 && (REG_P (x) \
223 ? REG_P (y) && REGNO (x) == REGNO (y) \
224 : rtx_equal_p (x, y) && ! side_effects_p (x))))
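/* For instance (illustrative): (reg:SI 3) MATCHES (reg:HI 3), since only
   the register number is compared; two structurally equal MEMs match via
   rtx_equal_p provided the first has no side effects; and a null X
   matches only a null Y.  */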
226 /* Indicates whether two reload purposes are for similar enough things that
227 we can merge their reloads. */
228 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
229 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
230 || ((when1) == (when2) && (op1) == (op2)) \
231 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
232 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
233 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
234 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
235 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
237 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
238 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
239 ((when1) != (when2) \
240 || ! ((op1) == (op2) \
241 || (when1) == RELOAD_FOR_INPUT \
242 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
243 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
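/* A worked example of the two macros above (illustrative): two
   RELOAD_FOR_INPUT reloads are MERGABLE even for different operand
   numbers, and merging them does not degrade to RELOAD_OTHER.  Two
   RELOAD_FOR_INPUT_ADDRESS reloads for different operands are not
   MERGABLE at all, while merging anything with a RELOAD_OTHER reload
   is allowed and yields RELOAD_OTHER.  */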
245 /* If we are going to reload an address, compute the reload type to
246 use. */
247 #define ADDR_TYPE(type) \
248 ((type) == RELOAD_FOR_INPUT_ADDRESS \
249 ? RELOAD_FOR_INPADDR_ADDRESS \
250 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
251 ? RELOAD_FOR_OUTADDR_ADDRESS \
252 : (type)))
254 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
255 enum machine_mode, enum reload_type,
256 enum insn_code *, secondary_reload_info *);
257 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
258 int, unsigned int);
259 static void push_replacement (rtx *, int, enum machine_mode);
260 static void dup_replacements (rtx *, rtx *);
261 static void combine_reloads (void);
262 static int find_reusable_reload (rtx *, rtx, enum reg_class,
263 enum reload_type, int, int);
264 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
265 enum machine_mode, reg_class_t, int, int);
266 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
267 static struct decomposition decompose (rtx);
268 static int immune_p (rtx, rtx, struct decomposition);
269 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
270 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
271 int *);
272 static rtx make_memloc (rtx, int);
273 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
274 addr_space_t, rtx *);
275 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
276 int, enum reload_type, int, rtx);
277 static rtx subst_reg_equivs (rtx, rtx);
278 static rtx subst_indexed_address (rtx);
279 static void update_auto_inc_notes (rtx, int, int);
280 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
281 enum rtx_code, enum rtx_code, rtx *,
282 int, enum reload_type,int, rtx);
283 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
284 enum machine_mode, int,
285 enum reload_type, int);
286 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
287 int, rtx, int *);
288 static void copy_replacements_1 (rtx *, rtx *, int);
289 static int find_inc_amount (rtx, rtx);
290 static int refers_to_mem_for_reload_p (rtx);
291 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
292 rtx, rtx *);
294 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it is not already present
295 in the list. */
297 static void
298 push_reg_equiv_alt_mem (int regno, rtx mem)
300 rtx it;
302 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
303 if (rtx_equal_p (XEXP (it, 0), mem))
304 return;
306 reg_equiv_alt_mem_list (regno)
307 = alloc_EXPR_LIST (REG_EQUIV, mem,
308 reg_equiv_alt_mem_list (regno));
311 /* Determine if any secondary reloads are needed for loading (if IN_P is
312 nonzero) or storing (if IN_P is zero) X to or from a reload register of
313 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
314 are needed, push them.
316 Return the reload number of the secondary reload we made, or -1 if
317 we didn't need one. *PICODE is set to the insn_code to use if we do
318 need a secondary reload. */
320 static int
321 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
322 enum reg_class reload_class,
323 enum machine_mode reload_mode, enum reload_type type,
324 enum insn_code *picode, secondary_reload_info *prev_sri)
326 enum reg_class rclass = NO_REGS;
327 enum reg_class scratch_class;
328 enum machine_mode mode = reload_mode;
329 enum insn_code icode = CODE_FOR_nothing;
330 enum insn_code t_icode = CODE_FOR_nothing;
331 enum reload_type secondary_type;
332 int s_reload, t_reload = -1;
333 const char *scratch_constraint;
334 char letter;
335 secondary_reload_info sri;
337 if (type == RELOAD_FOR_INPUT_ADDRESS
338 || type == RELOAD_FOR_OUTPUT_ADDRESS
339 || type == RELOAD_FOR_INPADDR_ADDRESS
340 || type == RELOAD_FOR_OUTADDR_ADDRESS)
341 secondary_type = type;
342 else
343 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
345 *picode = CODE_FOR_nothing;
347 /* If X is a paradoxical SUBREG, use the inner value to determine both the
348 mode and object being reloaded. */
349 if (paradoxical_subreg_p (x))
351 x = SUBREG_REG (x);
352 reload_mode = GET_MODE (x);
355 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
356 is still a pseudo-register by now, it *must* have an equivalent MEM
357 but we don't want to assume that), use that equivalent when seeing if
358 a secondary reload is needed since whether or not a reload is needed
359 might be sensitive to the form of the MEM. */
361 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
362 && reg_equiv_mem (REGNO (x)))
363 x = reg_equiv_mem (REGNO (x));
365 sri.icode = CODE_FOR_nothing;
366 sri.prev_sri = prev_sri;
367 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
368 reload_mode, &sri);
369 icode = (enum insn_code) sri.icode;
371 /* If we don't need any secondary registers, done. */
372 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
373 return -1;
375 if (rclass != NO_REGS)
376 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
377 reload_mode, type, &t_icode, &sri);
379 /* If we will be using an insn, the secondary reload is for a
380 scratch register. */
382 if (icode != CODE_FOR_nothing)
384 /* If IN_P is nonzero, the reload register will be the output in
385 operand 0. If IN_P is zero, the reload register will be the input
386 in operand 1. Outputs should have an initial "=", which we must
387 skip. */
389 /* ??? It would be useful to be able to handle only two, or more than
390 three, operands, but for now we can only handle the case of having
391 exactly three: output, input and one temp/scratch. */
392 gcc_assert (insn_data[(int) icode].n_operands == 3);
394 /* ??? We currently have no way to represent a reload that needs
395 an icode to reload from an intermediate tertiary reload register.
396 We should probably have a new field in struct reload to tag a
397 chain of scratch operand reloads onto. */
398 gcc_assert (rclass == NO_REGS);
400 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
401 gcc_assert (*scratch_constraint == '=');
402 scratch_constraint++;
403 if (*scratch_constraint == '&')
404 scratch_constraint++;
405 letter = *scratch_constraint;
406 scratch_class = (letter == 'r' ? GENERAL_REGS
407 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
408 scratch_constraint));
410 rclass = scratch_class;
411 mode = insn_data[(int) icode].operand[2].mode;
414 /* This case isn't valid, so fail. Reload is allowed to use the same
415 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
416 in the case of a secondary register, we actually need two different
417 registers for correct code. We fail here to prevent the possibility of
418 silently generating incorrect code later.
420 The convention is that secondary input reloads are valid only if the
421 secondary_class is different from class. If you have such a case, you
422 cannot use secondary reloads; you must work around the problem some
423 other way.
425 Allow this when a reload_in/out pattern is being used. I.e. assume
426 that the generated code handles this case. */
428 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
429 || t_icode != CODE_FOR_nothing);
431 /* See if we can reuse an existing secondary reload. */
432 for (s_reload = 0; s_reload < n_reloads; s_reload++)
433 if (rld[s_reload].secondary_p
434 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
435 || reg_class_subset_p (rld[s_reload].rclass, rclass))
436 && ((in_p && rld[s_reload].inmode == mode)
437 || (! in_p && rld[s_reload].outmode == mode))
438 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
439 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
440 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
441 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
442 && (small_register_class_p (rclass)
443 || targetm.small_register_classes_for_mode_p (VOIDmode))
444 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
445 opnum, rld[s_reload].opnum))
447 if (in_p)
448 rld[s_reload].inmode = mode;
449 if (! in_p)
450 rld[s_reload].outmode = mode;
452 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
453 rld[s_reload].rclass = rclass;
455 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
456 rld[s_reload].optional &= optional;
457 rld[s_reload].secondary_p = 1;
458 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
459 opnum, rld[s_reload].opnum))
460 rld[s_reload].when_needed = RELOAD_OTHER;
462 break;
465 if (s_reload == n_reloads)
467 #ifdef SECONDARY_MEMORY_NEEDED
468 /* If we need a memory location to copy between the two reload regs,
469 set it up now. Note that we do the input case before making
470 the reload and the output case after. This is due to the
471 way reloads are output. */
473 if (in_p && icode == CODE_FOR_nothing
474 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
476 get_secondary_mem (x, reload_mode, opnum, type);
478 /* We may have just added new reloads. Make sure we add
479 the new reload at the end. */
480 s_reload = n_reloads;
482 #endif
484 /* We need to make a new secondary reload for this register class. */
485 rld[s_reload].in = rld[s_reload].out = 0;
486 rld[s_reload].rclass = rclass;
488 rld[s_reload].inmode = in_p ? mode : VOIDmode;
489 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
490 rld[s_reload].reg_rtx = 0;
491 rld[s_reload].optional = optional;
492 rld[s_reload].inc = 0;
493 /* Maybe we could combine these, but it seems too tricky. */
494 rld[s_reload].nocombine = 1;
495 rld[s_reload].in_reg = 0;
496 rld[s_reload].out_reg = 0;
497 rld[s_reload].opnum = opnum;
498 rld[s_reload].when_needed = secondary_type;
499 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
500 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
501 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
502 rld[s_reload].secondary_out_icode
503 = ! in_p ? t_icode : CODE_FOR_nothing;
504 rld[s_reload].secondary_p = 1;
506 n_reloads++;
508 #ifdef SECONDARY_MEMORY_NEEDED
509 if (! in_p && icode == CODE_FOR_nothing
510 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
511 get_secondary_mem (x, mode, opnum, type);
512 #endif
515 *picode = icode;
516 return s_reload;
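/* Sketch of how the above plays out on a hypothetical target: if moving a
   value from memory into class FLOAT_REGS first requires an intermediate
   GENERAL_REGS register, targetm.secondary_reload returns GENERAL_REGS and
   push_secondary_reload records a second reload of that class, chained to
   the original via secondary_in_reload; a scratch register demanded by a
   reload_in pattern is handled through the pattern's operand 2 instead.  */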
519 /* If a secondary reload is needed, return its class. If both an intermediate
520 register and a scratch register are needed, we return the class of the
521 intermediate register. */
522 reg_class_t
523 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
524 rtx x)
526 enum insn_code icode;
527 secondary_reload_info sri;
529 sri.icode = CODE_FOR_nothing;
530 sri.prev_sri = NULL;
531 rclass
532 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
533 icode = (enum insn_code) sri.icode;
535 /* If there are no secondary reloads at all, we return NO_REGS.
536 If an intermediate register is needed, we return its class. */
537 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
538 return rclass;
540 /* No intermediate register is needed, but we have a special reload
541 pattern, which we assume for now needs a scratch register. */
542 return scratch_reload_class (icode);
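/* Typical use (a sketch): a caller asks whether copying IN into a reload
   register of class RCLASS needs help, e.g.

       if (secondary_reload_class (1, rclass, inmode, in) != NO_REGS)
         ... an input secondary reload will be needed ...

   which is how push_reload below tests the SUBREG cases.  */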
545 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
546 three operands, verify that operand 2 is an output operand, and return
547 its register class.
548 ??? We'd like to be able to handle any pattern with at least 2 operands,
549 for zero or more scratch registers, but that needs more infrastructure. */
550 enum reg_class
551 scratch_reload_class (enum insn_code icode)
553 const char *scratch_constraint;
554 char scratch_letter;
555 enum reg_class rclass;
557 gcc_assert (insn_data[(int) icode].n_operands == 3);
558 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
559 gcc_assert (*scratch_constraint == '=');
560 scratch_constraint++;
561 if (*scratch_constraint == '&')
562 scratch_constraint++;
563 scratch_letter = *scratch_constraint;
564 if (scratch_letter == 'r')
565 return GENERAL_REGS;
566 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
567 scratch_constraint);
568 gcc_assert (rclass != NO_REGS);
569 return rclass;
572 #ifdef SECONDARY_MEMORY_NEEDED
574 /* Return a memory location that will be used to copy X in mode MODE.
575 If we haven't already made a location for this mode in this insn,
576 call find_reloads_address on the location being returned. */
578 rtx
579 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
580 int opnum, enum reload_type type)
582 rtx loc;
583 int mem_valid;
585 /* By default, if MODE is narrower than a word, widen it to a word.
586 This is required because most machines that require these memory
587 locations do not support short loads and stores from all registers
588 (e.g., FP registers). */
590 #ifdef SECONDARY_MEMORY_NEEDED_MODE
591 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
592 #else
593 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
594 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
595 #endif
597 /* If we already have made a MEM for this operand in MODE, return it. */
598 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
599 return secondary_memlocs_elim[(int) mode][opnum];
601 /* If this is the first time we've tried to get a MEM for this mode,
602 allocate a new one. `something_changed' in reload will get set
603 by noticing that the frame size has changed. */
605 if (secondary_memlocs[(int) mode] == 0)
607 #ifdef SECONDARY_MEMORY_NEEDED_RTX
608 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
609 #else
610 secondary_memlocs[(int) mode]
611 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
612 #endif
615 /* Get a version of the address doing any eliminations needed. If that
616 didn't give us a new MEM, make a new one if it isn't valid. */
618 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
619 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
620 MEM_ADDR_SPACE (loc));
622 if (! mem_valid && loc == secondary_memlocs[(int) mode])
623 loc = copy_rtx (loc);
625 /* The only time the call below will do anything is if the stack
626 offset is too large. In that case IND_LEVELS doesn't matter, so we
627 can just pass a zero. Adjust the type to be the address of the
628 corresponding object. If the address was valid, save the eliminated
629 address. If it wasn't valid, we need to make a reload each time, so
630 don't save it. */
632 if (! mem_valid)
634 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
635 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
636 : RELOAD_OTHER);
638 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
639 opnum, type, 0, 0);
642 secondary_memlocs_elim[(int) mode][opnum] = loc;
643 if (secondary_memlocs_elim_used <= (int)mode)
644 secondary_memlocs_elim_used = (int)mode + 1;
645 return loc;
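/* For instance (illustrative): copying a QImode value between two register
   classes that can only communicate through memory would use a word_mode
   stack slot here on a typical 32-bit target, because of the widening
   above; the slot is remembered per mode and per operand so later reloads
   of the same kind reuse it.  */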
648 /* Clear any secondary memory locations we've made. */
650 void
651 clear_secondary_mem (void)
653 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
655 #endif /* SECONDARY_MEMORY_NEEDED */
658 /* Find the largest class which has at least one register valid in
659 mode INNER, and which for every such register, that register number
660 plus N is also valid in OUTER (if in range) and is cheap to move
661 into REGNO. Such a class must exist. */
663 static enum reg_class
664 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
665 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
666 unsigned int dest_regno ATTRIBUTE_UNUSED)
668 int best_cost = -1;
669 int rclass;
670 int regno;
671 enum reg_class best_class = NO_REGS;
672 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
673 unsigned int best_size = 0;
674 int cost;
676 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
678 int bad = 0;
679 int good = 0;
680 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
681 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
683 if (HARD_REGNO_MODE_OK (regno, inner))
685 good = 1;
686 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
687 || ! HARD_REGNO_MODE_OK (regno + n, outer))
688 bad = 1;
692 if (bad || !good)
693 continue;
694 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
696 if ((reg_class_size[rclass] > best_size
697 && (best_cost < 0 || best_cost >= cost))
698 || best_cost > cost)
700 best_class = (enum reg_class) rclass;
701 best_size = reg_class_size[rclass];
702 best_cost = register_move_cost (outer, (enum reg_class) rclass,
703 dest_class);
707 gcc_assert (best_size != 0);
709 return best_class;
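/* Example of the search above, assuming a hypothetical register layout:
   when push_reload must reload the inner register of
   (subreg:SI (reg:DI <hard reg>) 4), it asks for the largest class in
   which every register that can hold DImode also has register number + 1
   valid for SImode, preferring a large class that is cheap to move into
   the destination's class.  */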
712 /* Return the number of a previously made reload that can be combined with
713 a new one, or n_reloads if none of the existing reloads can be used.
714 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
715 push_reload; they determine the kind of the new reload that we try to
716 combine. P_IN points to the corresponding value of IN, which can be
717 modified by this function.
718 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
720 static int
721 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
722 enum reload_type type, int opnum, int dont_share)
724 rtx in = *p_in;
725 int i;
726 /* We can't merge two reloads if the output of either one is
727 earlyclobbered. */
729 if (earlyclobber_operand_p (out))
730 return n_reloads;
732 /* We can use an existing reload if the class is right
733 and at least one of IN and OUT is a match
734 and the other is at worst neutral.
735 (A zero compared against anything is neutral.)
737 For targets with small register classes, don't use existing reloads
738 unless they are for the same thing since that can cause us to need
739 more reload registers than we otherwise would. */
741 for (i = 0; i < n_reloads; i++)
742 if ((reg_class_subset_p (rclass, rld[i].rclass)
743 || reg_class_subset_p (rld[i].rclass, rclass))
744 /* If the existing reload has a register, it must fit our class. */
745 && (rld[i].reg_rtx == 0
746 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
747 true_regnum (rld[i].reg_rtx)))
748 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
749 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
750 || (out != 0 && MATCHES (rld[i].out, out)
751 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
752 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
753 && (small_register_class_p (rclass)
754 || targetm.small_register_classes_for_mode_p (VOIDmode))
755 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
756 return i;
758 /* Reloading a plain reg for input can match a reload to postincrement
759 that reg, since the postincrement's value is the right value.
760 Likewise, it can match a preincrement reload, since we regard
761 the preincrementation as happening before any ref in this insn
762 to that register. */
763 for (i = 0; i < n_reloads; i++)
764 if ((reg_class_subset_p (rclass, rld[i].rclass)
765 || reg_class_subset_p (rld[i].rclass, rclass))
766 /* If the existing reload has a register, it must fit our
767 class. */
768 && (rld[i].reg_rtx == 0
769 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
770 true_regnum (rld[i].reg_rtx)))
771 && out == 0 && rld[i].out == 0 && rld[i].in != 0
772 && ((REG_P (in)
773 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
774 && MATCHES (XEXP (rld[i].in, 0), in))
775 || (REG_P (rld[i].in)
776 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
777 && MATCHES (XEXP (in, 0), rld[i].in)))
778 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
779 && (small_register_class_p (rclass)
780 || targetm.small_register_classes_for_mode_p (VOIDmode))
781 && MERGABLE_RELOADS (type, rld[i].when_needed,
782 opnum, rld[i].opnum))
784 /* Make sure reload_in ultimately has the increment,
785 not the plain register. */
786 if (REG_P (in))
787 *p_in = rld[i].in;
788 return i;
790 return n_reloads;
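/* For example (illustrative): an input reload of (reg 65) can reuse an
   already-recorded reload whose IN is (post_inc (reg 65)); the loop above
   then makes *P_IN point at the POST_INC form so that reload_in keeps the
   increment.  */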
793 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
794 expression. MODE is the mode that X will be used in. OUTPUT is true if
795 the function is invoked for the output part of an enclosing reload. */
797 static bool
798 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
800 rtx inner;
802 /* Only SUBREGs are problematical. */
803 if (GET_CODE (x) != SUBREG)
804 return false;
806 inner = SUBREG_REG (x);
808 /* If INNER is a constant or PLUS, then INNER will need reloading. */
809 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
810 return true;
812 /* If INNER is not a hard register, then INNER will not need reloading. */
813 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
814 return false;
816 /* If INNER is not ok for MODE, then INNER will need reloading. */
817 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
818 return true;
820 /* If this is for an output, and the outer part is a word or smaller,
821 INNER is larger than a word and the number of registers in INNER is
822 not the same as the number of words in INNER, then INNER will need
823 reloading (with an in-out reload). */
824 return (output
825 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
826 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
827 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
828 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
831 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
832 requiring an extra reload register. The caller has already found that
833 IN contains some reference to REGNO, so check that we can produce the
834 new value in a single step. E.g. if we have
835 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
836 instruction that adds one to a register, this should succeed.
837 However, if we have something like
838 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
839 needs to be loaded into a register first, we need a separate reload
840 register.
841 Such PLUS reloads are generated by find_reloads_address_part.
842 The out-of-range PLUS expressions are usually introduced in the instruction
843 patterns by register elimination and substituting pseudos without a home
844 by their function-invariant equivalences. */
845 static int
846 can_reload_into (rtx in, int regno, enum machine_mode mode)
848 rtx dst, test_insn;
849 int r = 0;
850 struct recog_data save_recog_data;
852 /* For matching constraints, we often get notional input reloads where
853 we want to use the original register as the reload register. I.e.
854 technically this is a non-optional input-output reload, but IN is
855 already a valid register, and has been chosen as the reload register.
856 Speed this up, since it trivially works. */
857 if (REG_P (in))
858 return 1;
860 /* To test MEMs properly, we'd have to take into account all the reloads
861 that are already scheduled, which can become quite complicated.
862 And since we've already handled address reloads for this MEM, it
863 should always succeed anyway. */
864 if (MEM_P (in))
865 return 1;
867 /* If we can make a simple SET insn that does the job, everything should
868 be fine. */
869 dst = gen_rtx_REG (mode, regno);
870 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
871 save_recog_data = recog_data;
872 if (recog_memoized (test_insn) >= 0)
874 extract_insn (test_insn);
875 r = constrain_operands (1);
877 recog_data = save_recog_data;
878 return r;
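/* Illustration of the recog test above on a hypothetical target: with
   IN = (plus:SI (reg:SI 13) (const_int 1)), the synthesized
   (set (reg:SI 13) (plus:SI (reg:SI 13) (const_int 1))) matches an
   add-immediate pattern and constrain_operands succeeds, so no extra
   reload register is needed; with (const_int 999) instead, on a machine
   whose add immediates only cover small constants, the test fails and the
   caller must allocate a separate reload register.  */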
881 /* Record one reload that needs to be performed.
882 IN is an rtx saying where the data are to be found before this instruction.
883 OUT says where they must be stored after the instruction.
884 (IN is zero for data not read, and OUT is zero for data not written.)
885 INLOC and OUTLOC point to the places in the instructions where
886 IN and OUT were found.
887 If IN and OUT are both nonzero, it means the same register must be used
888 to reload both IN and OUT.
890 RCLASS is a register class required for the reloaded data.
891 INMODE is the machine mode that the instruction requires
892 for the reg that replaces IN and OUTMODE is likewise for OUT.
894 If IN is zero, then OUT's location and mode should be passed as
895 INLOC and INMODE.
897 STRICT_LOW is nonzero if there is a containing STRICT_LOW_PART rtx.
899 OPTIONAL nonzero means this reload does not need to be performed:
900 it can be discarded if that is more convenient.
902 OPNUM and TYPE say what the purpose of this reload is.
904 The return value is the reload-number for this reload.
906 If both IN and OUT are nonzero, in some rare cases we might
907 want to make two separate reloads. (Actually we never do this now.)
908 Therefore, the reload-number for OUT is stored in
909 output_reloadnum when we return; the return value applies to IN.
910 Usually (presently always), when IN and OUT are nonzero,
911 the two reload-numbers are equal, but the caller should be careful to
912 distinguish them. */
914 int
915 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
916 enum reg_class rclass, enum machine_mode inmode,
917 enum machine_mode outmode, int strict_low, int optional,
918 int opnum, enum reload_type type)
920 int i;
921 int dont_share = 0;
922 int dont_remove_subreg = 0;
923 #ifdef LIMIT_RELOAD_CLASS
924 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
925 #endif
926 int secondary_in_reload = -1, secondary_out_reload = -1;
927 enum insn_code secondary_in_icode = CODE_FOR_nothing;
928 enum insn_code secondary_out_icode = CODE_FOR_nothing;
930 /* INMODE and/or OUTMODE could be VOIDmode if no mode
931 has been specified for the operand. In that case,
932 use the operand's mode as the mode to reload. */
933 if (inmode == VOIDmode && in != 0)
934 inmode = GET_MODE (in);
935 if (outmode == VOIDmode && out != 0)
936 outmode = GET_MODE (out);
938 /* If find_reloads and friends have so far failed to replace a pseudo
939 with its reg_equiv_constant constant, something went wrong
940 beforehand.
941 Note that it can't simply be done here if we missed it earlier
942 since the constant might need to be pushed into the literal pool
943 and the resulting memref would probably need further
944 reloading. */
945 if (in != 0 && REG_P (in))
947 int regno = REGNO (in);
949 gcc_assert (regno < FIRST_PSEUDO_REGISTER
950 || reg_renumber[regno] >= 0
951 || reg_equiv_constant (regno) == NULL_RTX);
954 /* reg_equiv_constant only contains constants which are obviously
955 not appropriate as a destination. So if we would need to replace
956 the destination pseudo with a constant, we are in real
957 trouble. */
958 if (out != 0 && REG_P (out))
960 int regno = REGNO (out);
962 gcc_assert (regno < FIRST_PSEUDO_REGISTER
963 || reg_renumber[regno] >= 0
964 || reg_equiv_constant (regno) == NULL_RTX);
967 /* If we have a read-write operand with an address side-effect,
968 change either IN or OUT so the side-effect happens only once. */
969 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
970 switch (GET_CODE (XEXP (in, 0)))
972 case POST_INC: case POST_DEC: case POST_MODIFY:
973 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
974 break;
976 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
977 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
978 break;
980 default:
981 break;
984 /* If we are reloading a (SUBREG constant ...), really reload just the
985 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
986 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
987 a pseudo and hence will become a MEM) with M1 wider than M2 and the
988 register is a pseudo, also reload the inside expression.
989 For machines that extend byte loads, do this for any SUBREG of a pseudo
990 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
991 M2 is an integral mode that gets extended when loaded.
992 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
993 where either M1 is not valid for R or M2 is wider than a word but we
994 only need one register to store an M2-sized quantity in R.
995 (However, if OUT is nonzero, we need to reload the reg *and*
996 the subreg, so do nothing here, and let following statement handle it.)
998 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
999 we can't handle it here because CONST_INT does not indicate a mode.
1001 Similarly, we must reload the inside expression if we have a
1002 STRICT_LOW_PART (presumably, in == out in this case).
1004 Also reload the inner expression if it does not require a secondary
1005 reload but the SUBREG does.
1007 Finally, reload the inner expression if it is a register that is in
1008 the class whose registers cannot be referenced in a different size
1009 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1010 cannot reload just the inside since we might end up with the wrong
1011 register class. But if it is inside a STRICT_LOW_PART, we have
1012 no choice, so we hope we do get the right register class there. */
1014 if (in != 0 && GET_CODE (in) == SUBREG
1015 && (subreg_lowpart_p (in) || strict_low)
1016 #ifdef CANNOT_CHANGE_MODE_CLASS
1017 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1018 #endif
1019 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1020 && (CONSTANT_P (SUBREG_REG (in))
1021 || GET_CODE (SUBREG_REG (in)) == PLUS
1022 || strict_low
1023 || (((REG_P (SUBREG_REG (in))
1024 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1025 || MEM_P (SUBREG_REG (in)))
1026 && ((GET_MODE_PRECISION (inmode)
1027 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1028 #ifdef LOAD_EXTEND_OP
1029 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1030 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1031 <= UNITS_PER_WORD)
1032 && (GET_MODE_PRECISION (inmode)
1033 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1034 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1035 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1036 #endif
1037 #ifdef WORD_REGISTER_OPERATIONS
1038 || ((GET_MODE_PRECISION (inmode)
1039 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1040 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1041 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1042 / UNITS_PER_WORD)))
1043 #endif
1045 || (REG_P (SUBREG_REG (in))
1046 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1047 /* The case where out is nonzero
1048 is handled differently in the following statement. */
1049 && (out == 0 || subreg_lowpart_p (in))
1050 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1051 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1052 > UNITS_PER_WORD)
1053 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1054 / UNITS_PER_WORD)
1055 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1056 [GET_MODE (SUBREG_REG (in))]))
1057 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1058 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1059 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1060 SUBREG_REG (in))
1061 == NO_REGS))
1062 #ifdef CANNOT_CHANGE_MODE_CLASS
1063 || (REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1065 && REG_CANNOT_CHANGE_MODE_P
1066 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1067 #endif
1070 #ifdef LIMIT_RELOAD_CLASS
1071 in_subreg_loc = inloc;
1072 #endif
1073 inloc = &SUBREG_REG (in);
1074 in = *inloc;
1075 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1076 if (MEM_P (in))
1077 /* This is supposed to happen only for paradoxical subregs made by
1078 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1079 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1080 #endif
1081 inmode = GET_MODE (in);
1084 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1085 where M1 is not valid for R if it was not handled by the code above.
1087 Similar issue for (SUBREG constant ...) if it was not handled by the
1088 code above. This can happen if SUBREG_BYTE != 0.
1090 However, we must reload the inner reg *as well as* the subreg in
1091 that case. */
1093 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1095 enum reg_class in_class = rclass;
1097 if (REG_P (SUBREG_REG (in)))
1098 in_class
1099 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1100 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1101 GET_MODE (SUBREG_REG (in)),
1102 SUBREG_BYTE (in),
1103 GET_MODE (in)),
1104 REGNO (SUBREG_REG (in)));
1106 /* This relies on the fact that emit_reload_insns outputs the
1107 instructions for input reloads of type RELOAD_OTHER in the same
1108 order as the reloads. Thus if the outer reload is also of type
1109 RELOAD_OTHER, we are guaranteed that this inner reload will be
1110 output before the outer reload. */
1111 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1112 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1113 dont_remove_subreg = 1;
1116 /* Similarly for paradoxical and problematical SUBREGs on the output.
1117 Note that there is no reason we need worry about the previous value
1118 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1119 entitled to clobber it all (except in the case of a word mode subreg
1120 or of a STRICT_LOW_PART, in that latter case the constraint should
1121 label it input-output.) */
1122 if (out != 0 && GET_CODE (out) == SUBREG
1123 && (subreg_lowpart_p (out) || strict_low)
1124 #ifdef CANNOT_CHANGE_MODE_CLASS
1125 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1126 #endif
1127 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1128 && (CONSTANT_P (SUBREG_REG (out))
1129 || strict_low
1130 || (((REG_P (SUBREG_REG (out))
1131 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1132 || MEM_P (SUBREG_REG (out)))
1133 && ((GET_MODE_PRECISION (outmode)
1134 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1135 #ifdef WORD_REGISTER_OPERATIONS
1136 || ((GET_MODE_PRECISION (outmode)
1137 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1138 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1139 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1140 / UNITS_PER_WORD)))
1141 #endif
1143 || (REG_P (SUBREG_REG (out))
1144 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1145 /* The case of a word mode subreg
1146 is handled differently in the following statement. */
1147 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1148 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1149 > UNITS_PER_WORD))
1150 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1151 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1152 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1153 SUBREG_REG (out))
1154 == NO_REGS))
1155 #ifdef CANNOT_CHANGE_MODE_CLASS
1156 || (REG_P (SUBREG_REG (out))
1157 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1158 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1159 GET_MODE (SUBREG_REG (out)),
1160 outmode))
1161 #endif
1164 #ifdef LIMIT_RELOAD_CLASS
1165 out_subreg_loc = outloc;
1166 #endif
1167 outloc = &SUBREG_REG (out);
1168 out = *outloc;
1169 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1170 gcc_assert (!MEM_P (out)
1171 || GET_MODE_SIZE (GET_MODE (out))
1172 <= GET_MODE_SIZE (outmode));
1173 #endif
1174 outmode = GET_MODE (out);
1177 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1178 where either M1 is not valid for R or M2 is wider than a word but we
1179 only need one register to store an M2-sized quantity in R.
1181 However, we must reload the inner reg *as well as* the subreg in
1182 that case and the inner reg is an in-out reload. */
1184 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1186 enum reg_class in_out_class
1187 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1188 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1189 GET_MODE (SUBREG_REG (out)),
1190 SUBREG_BYTE (out),
1191 GET_MODE (out)),
1192 REGNO (SUBREG_REG (out)));
1194 /* This relies on the fact that emit_reload_insns outputs the
1195 instructions for output reloads of type RELOAD_OTHER in reverse
1196 order of the reloads. Thus if the outer reload is also of type
1197 RELOAD_OTHER, we are guaranteed that this inner reload will be
1198 output after the outer reload. */
1199 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1200 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1201 0, 0, opnum, RELOAD_OTHER);
1202 dont_remove_subreg = 1;
1205 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1206 if (in != 0 && out != 0 && MEM_P (out)
1207 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1208 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1209 dont_share = 1;
1211 /* If IN is a SUBREG of a hard register, make a new REG. This
1212 simplifies some of the cases below. */
1214 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1215 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1216 && ! dont_remove_subreg)
1217 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1219 /* Similarly for OUT. */
1220 if (out != 0 && GET_CODE (out) == SUBREG
1221 && REG_P (SUBREG_REG (out))
1222 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1223 && ! dont_remove_subreg)
1224 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1226 /* Narrow down the class of register wanted if that is
1227 desirable on this machine for efficiency. */
1229 reg_class_t preferred_class = rclass;
1231 if (in != 0)
1232 preferred_class = targetm.preferred_reload_class (in, rclass);
1234 /* Output reloads may need analogous treatment, different in detail. */
1235 if (out != 0)
1236 preferred_class
1237 = targetm.preferred_output_reload_class (out, preferred_class);
1239 /* Discard what the target said if we cannot do it. */
1240 if (preferred_class != NO_REGS
1241 || (optional && type == RELOAD_FOR_OUTPUT))
1242 rclass = (enum reg_class) preferred_class;
1245 /* Make sure we use a class that can handle the actual pseudo
1246 inside any subreg. For example, on the 386, QImode regs
1247 can appear within SImode subregs. Although GENERAL_REGS
1248 can handle SImode, QImode needs a smaller class. */
1249 #ifdef LIMIT_RELOAD_CLASS
1250 if (in_subreg_loc)
1251 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1252 else if (in != 0 && GET_CODE (in) == SUBREG)
1253 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1255 if (out_subreg_loc)
1256 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1257 if (out != 0 && GET_CODE (out) == SUBREG)
1258 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1259 #endif
1261 /* Verify that this class is at least possible for the mode that
1262 is specified. */
1263 if (this_insn_is_asm)
1265 enum machine_mode mode;
1266 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1267 mode = inmode;
1268 else
1269 mode = outmode;
1270 if (mode == VOIDmode)
1272 error_for_asm (this_insn, "cannot reload integer constant "
1273 "operand in %<asm%>");
1274 mode = word_mode;
1275 if (in != 0)
1276 inmode = word_mode;
1277 if (out != 0)
1278 outmode = word_mode;
1280 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1281 if (HARD_REGNO_MODE_OK (i, mode)
1282 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1283 break;
1284 if (i == FIRST_PSEUDO_REGISTER)
1286 error_for_asm (this_insn, "impossible register constraint "
1287 "in %<asm%>");
1288 /* Avoid further trouble with this insn. */
1289 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1290 /* We used to continue here setting class to ALL_REGS, but it triggers
1291 a sanity check on i386 for:
1292 void foo(long double d)
1293 {
1294 asm("" :: "a" (d));
1295 }
1296 Returning zero here ought to be safe as we take care in
1297 find_reloads not to process the reloads when the instruction was
1298 replaced by a USE. */
1300 return 0;
1304 /* Optional output reloads are always OK even if we have no register class,
1305 since the function of these reloads is only to have spill_reg_store etc.
1306 set, so that the storing insn can be deleted later. */
1307 gcc_assert (rclass != NO_REGS
1308 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1310 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1312 if (i == n_reloads)
1314 /* See if we need a secondary reload register to move between CLASS
1315 and IN or CLASS and OUT. Get the icode and push any required reloads
1316 needed for each of them if so. */
1318 if (in != 0)
1319 secondary_in_reload
1320 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1321 &secondary_in_icode, NULL);
1322 if (out != 0 && GET_CODE (out) != SCRATCH)
1323 secondary_out_reload
1324 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1325 type, &secondary_out_icode, NULL);
1327 /* We found no existing reload suitable for re-use.
1328 So add an additional reload. */
1330 #ifdef SECONDARY_MEMORY_NEEDED
1331 /* If a memory location is needed for the copy, make one. */
1332 if (in != 0
1333 && (REG_P (in)
1334 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1335 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1336 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1337 rclass, inmode))
1338 get_secondary_mem (in, inmode, opnum, type);
1339 #endif
1341 i = n_reloads;
1342 rld[i].in = in;
1343 rld[i].out = out;
1344 rld[i].rclass = rclass;
1345 rld[i].inmode = inmode;
1346 rld[i].outmode = outmode;
1347 rld[i].reg_rtx = 0;
1348 rld[i].optional = optional;
1349 rld[i].inc = 0;
1350 rld[i].nocombine = 0;
1351 rld[i].in_reg = inloc ? *inloc : 0;
1352 rld[i].out_reg = outloc ? *outloc : 0;
1353 rld[i].opnum = opnum;
1354 rld[i].when_needed = type;
1355 rld[i].secondary_in_reload = secondary_in_reload;
1356 rld[i].secondary_out_reload = secondary_out_reload;
1357 rld[i].secondary_in_icode = secondary_in_icode;
1358 rld[i].secondary_out_icode = secondary_out_icode;
1359 rld[i].secondary_p = 0;
1361 n_reloads++;
1363 #ifdef SECONDARY_MEMORY_NEEDED
1364 if (out != 0
1365 && (REG_P (out)
1366 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1367 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1368 && SECONDARY_MEMORY_NEEDED (rclass,
1369 REGNO_REG_CLASS (reg_or_subregno (out)),
1370 outmode))
1371 get_secondary_mem (out, outmode, opnum, type);
1372 #endif
1374 else
1376 /* We are reusing an existing reload,
1377 but we may have additional information for it.
1378 For example, we may now have both IN and OUT
1379 while the old one may have just one of them. */
1381 /* The modes can be different. If they are, we want to reload in
1382 the larger mode, so that the value is valid for both modes. */
1383 if (inmode != VOIDmode
1384 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1385 rld[i].inmode = inmode;
1386 if (outmode != VOIDmode
1387 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1388 rld[i].outmode = outmode;
1389 if (in != 0)
1391 rtx in_reg = inloc ? *inloc : 0;
1392 /* If we merge reloads for two distinct rtl expressions that
1393 are identical in content, there might be duplicate address
1394 reloads. Remove the extra set now, so that if we later find
1395 that we can inherit this reload, we can get rid of the
1396 address reloads altogether.
1398 Do not do this if both reloads are optional since the result
1399 would be an optional reload which could potentially leave
1400 unresolved address replacements.
1402 It is not sufficient to call transfer_replacements since
1403 choose_reload_regs will remove the replacements for address
1404 reloads of inherited reloads which results in the same
1405 problem. */
1406 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1407 && ! (rld[i].optional && optional))
1409 /* We must keep the address reload with the lower operand
1410 number alive. */
1411 if (opnum > rld[i].opnum)
1413 remove_address_replacements (in);
1414 in = rld[i].in;
1415 in_reg = rld[i].in_reg;
1417 else
1418 remove_address_replacements (rld[i].in);
1420 /* When emitting reloads we don't look only at the in- and
1421 outmode, but also directly at the operands (in and out).
1422 So we can't simply overwrite them with whatever we have found
1423 for this (to-be-merged) reload; we have to "merge" that too.
1424 Reusing another reload already verified that we deal with the
1425 same operands, just possibly in different modes. So we
1426 overwrite the operands only when the new mode is larger.
1427 See also PR33613. */
1428 if (!rld[i].in
1429 || GET_MODE_SIZE (GET_MODE (in))
1430 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1431 rld[i].in = in;
1432 if (!rld[i].in_reg
1433 || (in_reg
1434 && GET_MODE_SIZE (GET_MODE (in_reg))
1435 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1436 rld[i].in_reg = in_reg;
1438 if (out != 0)
1440 if (!rld[i].out
1441 || (out
1442 && GET_MODE_SIZE (GET_MODE (out))
1443 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1444 rld[i].out = out;
1445 if (outloc
1446 && (!rld[i].out_reg
1447 || GET_MODE_SIZE (GET_MODE (*outloc))
1448 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1449 rld[i].out_reg = *outloc;
1451 if (reg_class_subset_p (rclass, rld[i].rclass))
1452 rld[i].rclass = rclass;
1453 rld[i].optional &= optional;
1454 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1455 opnum, rld[i].opnum))
1456 rld[i].when_needed = RELOAD_OTHER;
1457 rld[i].opnum = MIN (rld[i].opnum, opnum);
1460 /* If the ostensible rtx being reloaded differs from the rtx found
1461 in the location to substitute, this reload is not safe to combine
1462 because we cannot reliably tell whether it appears in the insn. */
1464 if (in != 0 && in != *inloc)
1465 rld[i].nocombine = 1;
1467 #if 0
1468 /* This was replaced by changes in find_reloads_address_1 and the new
1469 function inc_for_reload, which go with a new meaning of reload_inc. */
1471 /* If this is an IN/OUT reload in an insn that sets the CC,
1472 it must be for an autoincrement. It doesn't work to store
1473 the incremented value after the insn because that would clobber the CC.
1474 So we must do the increment of the value reloaded from,
1475 increment it, store it back, then decrement again. */
1476 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1478 out = 0;
1479 rld[i].out = 0;
1480 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1481 /* If we did not find a nonzero amount-to-increment-by,
1482 that contradicts the belief that IN is being incremented
1483 in an address in this insn. */
1484 gcc_assert (rld[i].inc != 0);
1486 #endif
1488 /* If we will replace IN and OUT with the reload-reg,
1489 record where they are located so that substitution need
1490 not do a tree walk. */
1492 if (replace_reloads)
1494 if (inloc != 0)
1496 struct replacement *r = &replacements[n_replacements++];
1497 r->what = i;
1498 r->where = inloc;
1499 r->mode = inmode;
1501 if (outloc != 0 && outloc != inloc)
1503 struct replacement *r = &replacements[n_replacements++];
1504 r->what = i;
1505 r->where = outloc;
1506 r->mode = outmode;
1510 /* If this reload is just being introduced and it has both
1511 an incoming quantity and an outgoing quantity that are
1512 supposed to be made to match, see if either one of the two
1513 can serve as the place to reload into.
1515 If one of them is acceptable, set rld[i].reg_rtx
1516 to that one. */
1518 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1520 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1521 inmode, outmode,
1522 rld[i].rclass, i,
1523 earlyclobber_operand_p (out));
1525 /* If the outgoing register already contains the same value
1526 as the incoming one, we can dispense with loading it.
1527 The easiest way to tell the caller that is to give a phony
1528 value for the incoming operand (same as outgoing one). */
1529 if (rld[i].reg_rtx == out
1530 && (REG_P (in) || CONSTANT_P (in))
1531 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1532 static_reload_reg_p, i, inmode))
1533 rld[i].in = out;
1536 /* If this is an input reload and the operand contains a register that
1537 dies in this insn and is used nowhere else, see if it is the right class
1538 to be used for this reload. Use it if so. (This occurs most commonly
1539 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1540 this if it is also an output reload that mentions the register unless
1541 the output is a SUBREG that clobbers an entire register.
1543 Note that the operand might be one of the spill regs, if it is a
1544 pseudo reg and we are in a block where spilling has not taken place.
1545 But if there is no spilling in this block, that is OK.
1546 An explicitly used hard reg cannot be a spill reg. */
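/* As an illustration, when IN is (subreg:SI (reg:QI 3) 0) and hard reg 3
   dies in this insn without being used anywhere else, reg 3 itself can
   typically serve as the SImode reload register, provided SImode is OK
   for it and it belongs to RCLASS, so no extra spill register is
   needed.  */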
1548 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1550 rtx note;
1551 int regno;
1552 enum machine_mode rel_mode = inmode;
1554 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1555 rel_mode = outmode;
1557 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1558 if (REG_NOTE_KIND (note) == REG_DEAD
1559 && REG_P (XEXP (note, 0))
1560 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1561 && reg_mentioned_p (XEXP (note, 0), in)
1562 /* Check that a former pseudo is valid; see find_dummy_reload. */
1563 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1564 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1565 ORIGINAL_REGNO (XEXP (note, 0)))
1566 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1567 && ! refers_to_regno_for_reload_p (regno,
1568 end_hard_regno (rel_mode,
1569 regno),
1570 PATTERN (this_insn), inloc)
1571 /* If this is also an output reload, IN cannot be used as
1572 the reload register if it is set in this insn unless IN
1573 is also OUT. */
1574 && (out == 0 || in == out
1575 || ! hard_reg_set_here_p (regno,
1576 end_hard_regno (rel_mode, regno),
1577 PATTERN (this_insn)))
1578 /* ??? Why is this code so different from the previous?
1579 Is there any simple coherent way to describe the two together?
1580 What's going on here?  */
1581 && (in != out
1582 || (GET_CODE (in) == SUBREG
1583 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1584 / UNITS_PER_WORD)
1585 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1586 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1587 /* Make sure the operand fits in the reg that dies. */
1588 && (GET_MODE_SIZE (rel_mode)
1589 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1590 && HARD_REGNO_MODE_OK (regno, inmode)
1591 && HARD_REGNO_MODE_OK (regno, outmode))
1593 unsigned int offs;
1594 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1595 hard_regno_nregs[regno][outmode]);
1597 for (offs = 0; offs < nregs; offs++)
1598 if (fixed_regs[regno + offs]
1599 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1600 regno + offs))
1601 break;
1603 if (offs == nregs
1604 && (! (refers_to_regno_for_reload_p
1605 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1606 || can_reload_into (in, regno, inmode)))
1608 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1609 break;
1614 if (out)
1615 output_reloadnum = i;
1617 return i;
1620 /* Record an additional place we must replace a value
1621 for which we have already recorded a reload.
1622 RELOADNUM is the value returned by push_reload
1623 when the reload was recorded.
1624 This is used in insn patterns that use match_dup. */
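/* For example, when the pattern duplicates an operand with (match_dup 0),
   the reload already recorded for the original operand gets a second
   replacement registered at the duplicate's location, so that
   subst_reloads later patches both occurrences.  */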
1626 static void
1627 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1629 if (replace_reloads)
1631 struct replacement *r = &replacements[n_replacements++];
1632 r->what = reloadnum;
1633 r->where = loc;
1634 r->mode = mode;
1638 /* Duplicate any replacement we have recorded to apply at
1639 location ORIG_LOC to also be performed at DUP_LOC.
1640 This is used in insn patterns that use match_dup. */
1642 static void
1643 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1645 int i, n = n_replacements;
1647 for (i = 0; i < n; i++)
1649 struct replacement *r = &replacements[i];
1650 if (r->where == orig_loc)
1651 push_replacement (dup_loc, r->what, r->mode);
1655 /* Transfer all replacements that used to be in reload FROM to be in
1656 reload TO. */
1658 void
1659 transfer_replacements (int to, int from)
1661 int i;
1663 for (i = 0; i < n_replacements; i++)
1664 if (replacements[i].what == from)
1665 replacements[i].what = to;
1668 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1669 or a subpart of it. If we have any replacements registered for IN_RTX,
1670 cancel the reloads that were supposed to load them.
1671 Return nonzero if we canceled any reloads. */
1672 int
1673 remove_address_replacements (rtx in_rtx)
1675 int i, j;
1676 char reload_flags[MAX_RELOADS];
1677 int something_changed = 0;
1679 memset (reload_flags, 0, sizeof reload_flags);
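/* reload_flags[R] accumulates 1 if some replacement for reload R lies
   within IN_RTX and 2 if some replacement for R lies elsewhere; only
   reloads whose flags end up as exactly 1 are cancelled below.  */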
1680 for (i = 0, j = 0; i < n_replacements; i++)
1682 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1683 reload_flags[replacements[i].what] |= 1;
1684 else
1686 replacements[j++] = replacements[i];
1687 reload_flags[replacements[i].what] |= 2;
1690 /* Note that the following store must be done before the recursive calls. */
1691 n_replacements = j;
1693 for (i = n_reloads - 1; i >= 0; i--)
1695 if (reload_flags[i] == 1)
1697 deallocate_reload_reg (i);
1698 remove_address_replacements (rld[i].in);
1699 rld[i].in = 0;
1700 something_changed = 1;
1703 return something_changed;
1706 /* If there is only one output reload, and it is not for an earlyclobber
1707 operand, try to combine it with a (logically unrelated) input reload
1708 to reduce the number of reload registers needed.
1710 This is safe if the input reload does not appear in
1711 the value being output-reloaded, because this implies
1712 it is not needed any more once the original insn completes.
1714 If that doesn't work, see if we can use any of the registers that
1715 die in this insn as a reload register. We can if it is of the right
1716 class and does not appear in the value being output-reloaded. */
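/* For instance, in (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 4)))
   where neither pseudo received a hard register and reg 2 dies here, a
   single reload register can often do both jobs: it is loaded from reg
   2's memory location before the insn and stored to reg 1's location
   after it.  */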
1718 static void
1719 combine_reloads (void)
1721 int i, regno;
1722 int output_reload = -1;
1723 int secondary_out = -1;
1724 rtx note;
1726 /* Find the output reload; return unless there is exactly one
1727 and that one is mandatory. */
1729 for (i = 0; i < n_reloads; i++)
1730 if (rld[i].out != 0)
1732 if (output_reload >= 0)
1733 return;
1734 output_reload = i;
1737 if (output_reload < 0 || rld[output_reload].optional)
1738 return;
1740 /* An input-output reload isn't combinable. */
1742 if (rld[output_reload].in != 0)
1743 return;
1745 /* If this reload is for an earlyclobber operand, we can't do anything. */
1746 if (earlyclobber_operand_p (rld[output_reload].out))
1747 return;
1749 /* If there is a reload for part of the address of this operand, we would
1750 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1751 its life to the point where doing this combine would not lower the
1752 number of spill registers needed. */
1753 for (i = 0; i < n_reloads; i++)
1754 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1755 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1756 && rld[i].opnum == rld[output_reload].opnum)
1757 return;
1759 /* Check each input reload; can we combine it? */
1761 for (i = 0; i < n_reloads; i++)
1762 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1763 /* Life span of this reload must not extend past main insn. */
1764 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1765 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1766 && rld[i].when_needed != RELOAD_OTHER
1767 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1768 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1769 [(int) rld[output_reload].outmode])
1770 && rld[i].inc == 0
1771 && rld[i].reg_rtx == 0
1772 #ifdef SECONDARY_MEMORY_NEEDED
1773 /* Don't combine two reloads with different secondary
1774 memory locations. */
1775 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1776 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1777 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1778 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1779 #endif
1780 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1781 ? (rld[i].rclass == rld[output_reload].rclass)
1782 : (reg_class_subset_p (rld[i].rclass,
1783 rld[output_reload].rclass)
1784 || reg_class_subset_p (rld[output_reload].rclass,
1785 rld[i].rclass)))
1786 && (MATCHES (rld[i].in, rld[output_reload].out)
1787 /* Args reversed because the first arg seems to be
1788 the one that we imagine being modified
1789 while the second is the one that might be affected. */
1790 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1791 rld[i].in)
1792 /* However, if the input is a register that appears inside
1793 the output, then we also can't share.
1794 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1795 If the same reload reg is used for both reg 69 and the
1796 result to be stored in memory, then that result
1797 will clobber the address of the memory ref. */
1798 && ! (REG_P (rld[i].in)
1799 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1800 rld[output_reload].out))))
1801 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1802 rld[i].when_needed != RELOAD_FOR_INPUT)
1803 && (reg_class_size[(int) rld[i].rclass]
1804 || targetm.small_register_classes_for_mode_p (VOIDmode))
1805 /* We will allow making things slightly worse by combining an
1806 input and an output, but no worse than that. */
1807 && (rld[i].when_needed == RELOAD_FOR_INPUT
1808 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1810 int j;
1812 /* We have found a reload to combine with! */
1813 rld[i].out = rld[output_reload].out;
1814 rld[i].out_reg = rld[output_reload].out_reg;
1815 rld[i].outmode = rld[output_reload].outmode;
1816 /* Mark the old output reload as inoperative. */
1817 rld[output_reload].out = 0;
1818 /* The combined reload is needed for the entire insn. */
1819 rld[i].when_needed = RELOAD_OTHER;
1820 /* If the output reload had a secondary reload, copy it. */
1821 if (rld[output_reload].secondary_out_reload != -1)
1823 rld[i].secondary_out_reload
1824 = rld[output_reload].secondary_out_reload;
1825 rld[i].secondary_out_icode
1826 = rld[output_reload].secondary_out_icode;
1829 #ifdef SECONDARY_MEMORY_NEEDED
1830 /* Copy any secondary MEM. */
1831 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1832 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1833 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1834 #endif
1835 /* If required, minimize the register class. */
1836 if (reg_class_subset_p (rld[output_reload].rclass,
1837 rld[i].rclass))
1838 rld[i].rclass = rld[output_reload].rclass;
1840 /* Transfer all replacements from the old reload to the combined. */
1841 for (j = 0; j < n_replacements; j++)
1842 if (replacements[j].what == output_reload)
1843 replacements[j].what = i;
1845 return;
1848 /* If this insn has only one operand that is modified or written (assumed
1849 to be the first), it must be the one corresponding to this reload. It
1850 is safe to use anything that dies in this insn for that output provided
1851 that it does not occur in the output (we already know it isn't an
1852 earlyclobber).  If this is an asm insn, give up. */
1854 if (INSN_CODE (this_insn) == -1)
1855 return;
1857 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1858 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1859 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1860 return;
1862 /* See if some hard register that dies in this insn and is not used in
1863 the output is the right class. Only works if the register we pick
1864 up can fully hold our output reload. */
1865 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1866 if (REG_NOTE_KIND (note) == REG_DEAD
1867 && REG_P (XEXP (note, 0))
1868 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1869 rld[output_reload].out)
1870 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1871 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1872 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1873 regno)
1874 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1875 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1876 /* Ensure that a secondary or tertiary reload for this output
1877 won't want this register. */
1878 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1879 || (!(TEST_HARD_REG_BIT
1880 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1881 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1882 || !(TEST_HARD_REG_BIT
1883 (reg_class_contents[(int) rld[secondary_out].rclass],
1884 regno)))))
1885 && !fixed_regs[regno]
1886 /* Check that a former pseudo is valid; see find_dummy_reload. */
1887 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1888 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1889 ORIGINAL_REGNO (XEXP (note, 0)))
1890 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1892 rld[output_reload].reg_rtx
1893 = gen_rtx_REG (rld[output_reload].outmode, regno);
1894 return;
1898 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1899 See if one of IN and OUT is a register that may be used;
1900 this is desirable since a spill-register won't be needed.
1901 If so, return the register rtx that proves acceptable.
1903 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1904 RCLASS is the register class required for the reload.
1906 If FOR_REAL is >= 0, it is the number of the reload,
1907 and in some cases when it can be discovered that OUT doesn't need
1908 to be computed, clear out rld[FOR_REAL].out.
1910 If FOR_REAL is -1, this should not be done, because this call
1911 is just to see if a register can be found, not to find and install it.
1913 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1914 puts an additional constraint on being able to use IN for OUT since
1915 IN must not appear elsewhere in the insn (it is assumed that IN itself
1916 is safe from the earlyclobber). */
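/* For instance, when OUT is a hard register of the required class that
   the insn does not otherwise use, OUT itself can serve as the reload
   register: IN is copied into OUT before the insn, and no separate spill
   register or final store is needed.  */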
1918 static rtx
1919 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1920 enum machine_mode inmode, enum machine_mode outmode,
1921 reg_class_t rclass, int for_real, int earlyclobber)
1923 rtx in = real_in;
1924 rtx out = real_out;
1925 int in_offset = 0;
1926 int out_offset = 0;
1927 rtx value = 0;
1929 /* If operands exceed a word, we can't use either of them
1930 unless they have the same size. */
1931 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1932 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1933 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1934 return 0;
1936 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1937 respectively refers to a hard register. */
1939 /* Find the inside of any subregs. */
1940 while (GET_CODE (out) == SUBREG)
1942 if (REG_P (SUBREG_REG (out))
1943 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1944 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1945 GET_MODE (SUBREG_REG (out)),
1946 SUBREG_BYTE (out),
1947 GET_MODE (out));
1948 out = SUBREG_REG (out);
1950 while (GET_CODE (in) == SUBREG)
1952 if (REG_P (SUBREG_REG (in))
1953 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1954 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1955 GET_MODE (SUBREG_REG (in)),
1956 SUBREG_BYTE (in),
1957 GET_MODE (in));
1958 in = SUBREG_REG (in);
1961 /* Narrow down the reg class, the same way push_reload will;
1962 otherwise we might find a dummy now, but push_reload won't. */
1964 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1965 if (preferred_class != NO_REGS)
1966 rclass = (enum reg_class) preferred_class;
1969 /* See if OUT will do. */
1970 if (REG_P (out)
1971 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1973 unsigned int regno = REGNO (out) + out_offset;
1974 unsigned int nwords = hard_regno_nregs[regno][outmode];
1975 rtx saved_rtx;
1977 /* When we consider whether the insn uses OUT,
1978 ignore references within IN. They don't prevent us
1979 from copying IN into OUT, because those refs would
1980 move into the insn that reloads IN.
1982 However, we only ignore IN in its role as this reload.
1983 If the insn uses IN elsewhere and it contains OUT,
1984 that counts. We can't be sure it's the "same" operand
1985 so it might not go through this reload. */
1986 saved_rtx = *inloc;
1987 *inloc = const0_rtx;
1989 if (regno < FIRST_PSEUDO_REGISTER
1990 && HARD_REGNO_MODE_OK (regno, outmode)
1991 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1992 PATTERN (this_insn), outloc))
1994 unsigned int i;
1996 for (i = 0; i < nwords; i++)
1997 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1998 regno + i))
1999 break;
2001 if (i == nwords)
2003 if (REG_P (real_out))
2004 value = real_out;
2005 else
2006 value = gen_rtx_REG (outmode, regno);
2010 *inloc = saved_rtx;
2013 /* Consider using IN if OUT was not acceptable
2014 or if OUT dies in this insn (like the quotient in a divmod insn).
2015 We can't use IN unless it dies in this insn,
2016 which means we must know accurately which hard regs are live.
2017 Also, the result can't go in IN if IN is used within OUT,
2018 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2019 if (hard_regs_live_known
2020 && REG_P (in)
2021 && REGNO (in) < FIRST_PSEUDO_REGISTER
2022 && (value == 0
2023 || find_reg_note (this_insn, REG_UNUSED, real_out))
2024 && find_reg_note (this_insn, REG_DEAD, real_in)
2025 && !fixed_regs[REGNO (in)]
2026 && HARD_REGNO_MODE_OK (REGNO (in),
2027 /* The only case where out and real_out might
2028 have different modes is where real_out
2029 is a subreg, and in that case, out
2030 has a real mode. */
2031 (GET_MODE (out) != VOIDmode
2032 ? GET_MODE (out) : outmode))
2033 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2034 /* However only do this if we can be sure that this input
2035 operand doesn't correspond with an uninitialized pseudo.
2036 global can assign some hardreg to it that is the same as
2037 the one assigned to a different, also live pseudo (as it
2038 can ignore the conflict). We must never introduce writes
2039 to such hardregs, as they would clobber the other live
2040 pseudo. See PR 20973. */
2041 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2042 ORIGINAL_REGNO (in))
2043 /* Similarly, only do this if we can be sure that the death
2044 note is still valid. global can assign some hardreg to
2045 the pseudo referenced in the note and simultaneously a
2046 subword of this hardreg to a different, also live pseudo,
2047 because only another subword of the hardreg is actually
2048 used in the insn. This cannot happen if the pseudo has
2049 been assigned exactly one hardreg. See PR 33732. */
2050 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2052 unsigned int regno = REGNO (in) + in_offset;
2053 unsigned int nwords = hard_regno_nregs[regno][inmode];
2055 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2056 && ! hard_reg_set_here_p (regno, regno + nwords,
2057 PATTERN (this_insn))
2058 && (! earlyclobber
2059 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2060 PATTERN (this_insn), inloc)))
2062 unsigned int i;
2064 for (i = 0; i < nwords; i++)
2065 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2066 regno + i))
2067 break;
2069 if (i == nwords)
2071 /* If we were going to use OUT as the reload reg
2072 and changed our mind, it means OUT is a dummy that
2073 dies here. So don't bother copying value to it. */
2074 if (for_real >= 0 && value == real_out)
2075 rld[for_real].out = 0;
2076 if (REG_P (real_in))
2077 value = real_in;
2078 else
2079 value = gen_rtx_REG (inmode, regno);
2084 return value;
2087 /* This page contains subroutines used mainly for determining
2088 whether the IN or an OUT of a reload can serve as the
2089 reload register. */
2091 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2093 int
2094 earlyclobber_operand_p (rtx x)
2096 int i;
2098 for (i = 0; i < n_earlyclobbers; i++)
2099 if (reload_earlyclobbers[i] == x)
2100 return 1;
2102 return 0;
2105 /* Return 1 if expression X alters a hard reg in the range
2106 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2107 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2108 X should be the body of an instruction. */
2110 static int
2111 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2113 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2115 rtx op0 = SET_DEST (x);
2117 while (GET_CODE (op0) == SUBREG)
2118 op0 = SUBREG_REG (op0);
2119 if (REG_P (op0))
2121 unsigned int r = REGNO (op0);
2123 /* See if this reg overlaps range under consideration. */
2124 if (r < end_regno
2125 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2126 return 1;
2129 else if (GET_CODE (x) == PARALLEL)
2131 int i = XVECLEN (x, 0) - 1;
2133 for (; i >= 0; i--)
2134 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2135 return 1;
2138 return 0;
2141 /* Return 1 if ADDR is a valid memory address for mode MODE
2142 in address space AS, and check that each pseudo reg has the
2143 proper kind of hard reg. */
2145 int
2146 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2147 rtx addr, addr_space_t as)
2149 #ifdef GO_IF_LEGITIMATE_ADDRESS
2150 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2151 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2152 return 0;
2154 win:
2155 return 1;
2156 #else
2157 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2158 #endif
2161 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2162 if they are the same hard reg, and has special hacks for
2163 autoincrement and autodecrement.
2164 This is specifically intended for find_reloads to use
2165 in determining whether two operands match.
2166 X is the operand whose number is the lower of the two.
2168 The value is 2 if Y contains a pre-increment that matches
2169 a non-incrementing address in X. */
2171 /* ??? To be completely correct, we should arrange to pass
2172 for X the output operand and for Y the input operand.
2173 For now, we assume that the output operand has the lower number
2174 because that is natural in (SET output (... input ...)). */
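/* Thus, with X = (mem:SI (reg:SI 2)) and Y = (mem:SI (pre_inc:SI (reg:SI 2))),
   the operands match and the value returned is 2.  */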
2176 int
2177 operands_match_p (rtx x, rtx y)
2179 int i;
2180 RTX_CODE code = GET_CODE (x);
2181 const char *fmt;
2182 int success_2;
2184 if (x == y)
2185 return 1;
2186 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2187 && (REG_P (y) || (GET_CODE (y) == SUBREG
2188 && REG_P (SUBREG_REG (y)))))
2190 int j;
2192 if (code == SUBREG)
2194 i = REGNO (SUBREG_REG (x));
2195 if (i >= FIRST_PSEUDO_REGISTER)
2196 goto slow;
2197 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2198 GET_MODE (SUBREG_REG (x)),
2199 SUBREG_BYTE (x),
2200 GET_MODE (x));
2202 else
2203 i = REGNO (x);
2205 if (GET_CODE (y) == SUBREG)
2207 j = REGNO (SUBREG_REG (y));
2208 if (j >= FIRST_PSEUDO_REGISTER)
2209 goto slow;
2210 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2211 GET_MODE (SUBREG_REG (y)),
2212 SUBREG_BYTE (y),
2213 GET_MODE (y));
2215 else
2216 j = REGNO (y);
2218 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2219 multiple hard register group of scalar integer registers, so that
2220 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2221 register. */
2222 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2223 && SCALAR_INT_MODE_P (GET_MODE (x))
2224 && i < FIRST_PSEUDO_REGISTER)
2225 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2226 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2227 && SCALAR_INT_MODE_P (GET_MODE (y))
2228 && j < FIRST_PSEUDO_REGISTER)
2229 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2231 return i == j;
2233 /* If two operands must match, because they are really a single
2234 operand of an assembler insn, then two postincrements are invalid
2235 because the assembler insn would increment only once.
2236 On the other hand, a postincrement matches ordinary indexing
2237 if the postincrement is the output operand. */
2238 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2239 return operands_match_p (XEXP (x, 0), y);
2240 /* Two preincrements are invalid
2241 because the assembler insn would increment only once.
2242 On the other hand, a preincrement matches ordinary indexing
2243 if the preincrement is the input operand.
2244 In this case, return 2, since some callers need to do special
2245 things when this happens. */
2246 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2247 || GET_CODE (y) == PRE_MODIFY)
2248 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2250 slow:
2252 /* Now we have disposed of all the cases in which different rtx codes
2253 can match. */
2254 if (code != GET_CODE (y))
2255 return 0;
2257 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2258 if (GET_MODE (x) != GET_MODE (y))
2259 return 0;
2261 /* MEMs referring to different address space are not equivalent. */
2262 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2263 return 0;
2265 switch (code)
2267 case CONST_INT:
2268 case CONST_DOUBLE:
2269 case CONST_FIXED:
2270 return 0;
2272 case LABEL_REF:
2273 return XEXP (x, 0) == XEXP (y, 0);
2274 case SYMBOL_REF:
2275 return XSTR (x, 0) == XSTR (y, 0);
2277 default:
2278 break;
2281 /* Compare the elements. If any pair of corresponding elements
2282 fail to match, return 0 for the whole thing. */
2284 success_2 = 0;
2285 fmt = GET_RTX_FORMAT (code);
2286 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2288 int val, j;
2289 switch (fmt[i])
2291 case 'w':
2292 if (XWINT (x, i) != XWINT (y, i))
2293 return 0;
2294 break;
2296 case 'i':
2297 if (XINT (x, i) != XINT (y, i))
2298 return 0;
2299 break;
2301 case 'e':
2302 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2303 if (val == 0)
2304 return 0;
2305 /* If any subexpression returns 2,
2306 we should return 2 if we are successful. */
2307 if (val == 2)
2308 success_2 = 1;
2309 break;
2311 case '0':
2312 break;
2314 case 'E':
2315 if (XVECLEN (x, i) != XVECLEN (y, i))
2316 return 0;
2317 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2319 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2320 if (val == 0)
2321 return 0;
2322 if (val == 2)
2323 success_2 = 1;
2325 break;
2327 /* It is believed that rtx's at this level will never
2328 contain anything but integers and other rtx's,
2329 except for within LABEL_REFs and SYMBOL_REFs. */
2330 default:
2331 gcc_unreachable ();
2334 return 1 + success_2;
2337 /* Describe the range of registers or memory referenced by X.
2338 If X is a register, set REG_FLAG and put the first register
2339 number into START and the last plus one into END.
2340 If X is a memory reference, put a base address into BASE
2341 and a range of integer offsets into START and END.
2342 If X is pushing on the stack, we can assume it causes no trouble,
2343 so we set the SAFE field. */
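/* For example, (mem:SI (plus (reg fp) (const_int 8))) decomposes into
   base = (reg fp), start = 8, end = 12, while a push through
   (mem:SI (pre_dec (reg sp))) is marked safe.  */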
2345 static struct decomposition
2346 decompose (rtx x)
2348 struct decomposition val;
2349 int all_const = 0;
2351 memset (&val, 0, sizeof (val));
2353 switch (GET_CODE (x))
2355 case MEM:
2357 rtx base = NULL_RTX, offset = 0;
2358 rtx addr = XEXP (x, 0);
2360 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2361 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2363 val.base = XEXP (addr, 0);
2364 val.start = -GET_MODE_SIZE (GET_MODE (x));
2365 val.end = GET_MODE_SIZE (GET_MODE (x));
2366 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2367 return val;
2370 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2372 if (GET_CODE (XEXP (addr, 1)) == PLUS
2373 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2374 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2376 val.base = XEXP (addr, 0);
2377 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2378 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2379 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2380 return val;
2384 if (GET_CODE (addr) == CONST)
2386 addr = XEXP (addr, 0);
2387 all_const = 1;
2389 if (GET_CODE (addr) == PLUS)
2391 if (CONSTANT_P (XEXP (addr, 0)))
2393 base = XEXP (addr, 1);
2394 offset = XEXP (addr, 0);
2396 else if (CONSTANT_P (XEXP (addr, 1)))
2398 base = XEXP (addr, 0);
2399 offset = XEXP (addr, 1);
2403 if (offset == 0)
2405 base = addr;
2406 offset = const0_rtx;
2408 if (GET_CODE (offset) == CONST)
2409 offset = XEXP (offset, 0);
2410 if (GET_CODE (offset) == PLUS)
2412 if (CONST_INT_P (XEXP (offset, 0)))
2414 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2415 offset = XEXP (offset, 0);
2417 else if (CONST_INT_P (XEXP (offset, 1)))
2419 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2420 offset = XEXP (offset, 1);
2422 else
2424 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2425 offset = const0_rtx;
2428 else if (!CONST_INT_P (offset))
2430 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2431 offset = const0_rtx;
2434 if (all_const && GET_CODE (base) == PLUS)
2435 base = gen_rtx_CONST (GET_MODE (base), base);
2437 gcc_assert (CONST_INT_P (offset));
2439 val.start = INTVAL (offset);
2440 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2441 val.base = base;
2443 break;
2445 case REG:
2446 val.reg_flag = 1;
2447 val.start = true_regnum (x);
2448 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2450 /* A pseudo with no hard reg. */
2451 val.start = REGNO (x);
2452 val.end = val.start + 1;
2454 else
2455 /* A hard reg. */
2456 val.end = end_hard_regno (GET_MODE (x), val.start);
2457 break;
2459 case SUBREG:
2460 if (!REG_P (SUBREG_REG (x)))
2461 /* This could be more precise, but it's good enough. */
2462 return decompose (SUBREG_REG (x));
2463 val.reg_flag = 1;
2464 val.start = true_regnum (x);
2465 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2466 return decompose (SUBREG_REG (x));
2467 else
2468 /* A hard reg. */
2469 val.end = val.start + subreg_nregs (x);
2470 break;
2472 case SCRATCH:
2473 /* This hasn't been assigned yet, so it can't conflict yet. */
2474 val.safe = 1;
2475 break;
2477 default:
2478 gcc_assert (CONSTANT_P (x));
2479 val.safe = 1;
2480 break;
2482 return val;
2485 /* Return 1 if altering Y will not modify the value of X.
2486 Y is also described by YDATA, which should be decompose (Y). */
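/* Two frame slots such as (mem:SI (plus (reg fp) (const_int -4))) and
   (mem:SI (plus (reg fp) (const_int -8))) share a base but cover the
   disjoint offset ranges [-4,0) and [-8,-4), so each is immune to a
   store into the other.  */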
2488 static int
2489 immune_p (rtx x, rtx y, struct decomposition ydata)
2491 struct decomposition xdata;
2493 if (ydata.reg_flag)
2494 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2495 if (ydata.safe)
2496 return 1;
2498 gcc_assert (MEM_P (y));
2499 /* If Y is memory and X is not, Y can't affect X. */
2500 if (!MEM_P (x))
2501 return 1;
2503 xdata = decompose (x);
2505 if (! rtx_equal_p (xdata.base, ydata.base))
2507 /* If bases are distinct symbolic constants, there is no overlap. */
2508 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2509 return 1;
2510 /* Constants and stack slots never overlap. */
2511 if (CONSTANT_P (xdata.base)
2512 && (ydata.base == frame_pointer_rtx
2513 || ydata.base == hard_frame_pointer_rtx
2514 || ydata.base == stack_pointer_rtx))
2515 return 1;
2516 if (CONSTANT_P (ydata.base)
2517 && (xdata.base == frame_pointer_rtx
2518 || xdata.base == hard_frame_pointer_rtx
2519 || xdata.base == stack_pointer_rtx))
2520 return 1;
2521 /* If either base is variable, we don't know anything. */
2522 return 0;
2525 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2528 /* Similar, but calls decompose. */
2530 int
2531 safe_from_earlyclobber (rtx op, rtx clobber)
2533 struct decomposition early_data;
2535 early_data = decompose (clobber);
2536 return immune_p (op, clobber, early_data);
2539 /* Main entry point of this file: search the body of INSN
2540 for values that need reloading and record them with push_reload.
2541 REPLACE nonzero means record also where the values occur
2542 so that subst_reloads can be used.
2544 IND_LEVELS says how many levels of indirection are supported by this
2545 machine; a value of zero means that a memory reference is not a valid
2546 memory address.
2548 LIVE_KNOWN says we have valid information about which hard
2549 regs are live at each point in the program; this is true when
2550 we are called from global_alloc but false when stupid register
2551 allocation has been done.
2553 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2554 which is nonnegative if the reg has been commandeered for reloading into.
2555 It is copied into STATIC_RELOAD_REG_P and referenced from there
2556 by various subroutines.
2558 Return TRUE if some operands need to be changed, because of swapping
2559 commutative operands, reg_equiv_address substitution, or whatever. */
2561 int
2562 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2563 short *reload_reg_p)
2565 int insn_code_number;
2566 int i, j;
2567 int noperands;
2568 /* These start out as the constraints for the insn
2569 and they are chewed up as we consider alternatives. */
2570 const char *constraints[MAX_RECOG_OPERANDS];
2571 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2572 a register. */
2573 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2574 char pref_or_nothing[MAX_RECOG_OPERANDS];
2575 /* Nonzero for a MEM operand whose entire address needs a reload.
2576 May be -1 to indicate the entire address may or may not need a reload. */
2577 int address_reloaded[MAX_RECOG_OPERANDS];
2578 /* Nonzero for an address operand that needs to be completely reloaded.
2579 May be -1 to indicate the entire operand may or may not need a reload. */
2580 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2581 /* Value of enum reload_type to use for operand. */
2582 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2583 /* Value of enum reload_type to use within address of operand. */
2584 enum reload_type address_type[MAX_RECOG_OPERANDS];
2585 /* Save the usage of each operand. */
2586 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2587 int no_input_reloads = 0, no_output_reloads = 0;
2588 int n_alternatives;
2589 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2590 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2591 char this_alternative_win[MAX_RECOG_OPERANDS];
2592 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2593 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2594 int this_alternative_matches[MAX_RECOG_OPERANDS];
2595 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2596 int this_alternative_number;
2597 int goal_alternative_number = 0;
2598 int operand_reloadnum[MAX_RECOG_OPERANDS];
2599 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2600 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2601 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2602 char goal_alternative_win[MAX_RECOG_OPERANDS];
2603 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2604 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2605 int goal_alternative_swapped;
2606 int best;
2607 int commutative;
2608 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2609 rtx substed_operand[MAX_RECOG_OPERANDS];
2610 rtx body = PATTERN (insn);
2611 rtx set = single_set (insn);
2612 int goal_earlyclobber = 0, this_earlyclobber;
2613 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2614 int retval = 0;
2616 this_insn = insn;
2617 n_reloads = 0;
2618 n_replacements = 0;
2619 n_earlyclobbers = 0;
2620 replace_reloads = replace;
2621 hard_regs_live_known = live_known;
2622 static_reload_reg_p = reload_reg_p;
2624 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2625 neither are insns that SET cc0. Insns that use CC0 are not allowed
2626 to have any input reloads. */
2627 if (JUMP_P (insn) || CALL_P (insn))
2628 no_output_reloads = 1;
2630 #ifdef HAVE_cc0
2631 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2632 no_input_reloads = 1;
2633 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2634 no_output_reloads = 1;
2635 #endif
2637 #ifdef SECONDARY_MEMORY_NEEDED
2638 /* The eliminated forms of any secondary memory locations are per-insn, so
2639 clear them out here. */
2641 if (secondary_memlocs_elim_used)
2643 memset (secondary_memlocs_elim, 0,
2644 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2645 secondary_memlocs_elim_used = 0;
2647 #endif
2649 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2650 is cheap to move between them. If it is not, there may not be an insn
2651 to do the copy, so we may need a reload. */
2652 if (GET_CODE (body) == SET
2653 && REG_P (SET_DEST (body))
2654 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2655 && REG_P (SET_SRC (body))
2656 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2657 && register_move_cost (GET_MODE (SET_SRC (body)),
2658 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2659 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2660 return 0;
2662 extract_insn (insn);
2664 noperands = reload_n_operands = recog_data.n_operands;
2665 n_alternatives = recog_data.n_alternatives;
2667 /* Just return "no reloads" if insn has no operands with constraints. */
2668 if (noperands == 0 || n_alternatives == 0)
2669 return 0;
2671 insn_code_number = INSN_CODE (insn);
2672 this_insn_is_asm = insn_code_number < 0;
2674 memcpy (operand_mode, recog_data.operand_mode,
2675 noperands * sizeof (enum machine_mode));
2676 memcpy (constraints, recog_data.constraints,
2677 noperands * sizeof (const char *));
2679 commutative = -1;
2681 /* If we will need to know, later, whether some pair of operands
2682 are the same, we must compare them now and save the result.
2683 Reloading the base and index registers will clobber them
2684 and afterward they will fail to match. */
2686 for (i = 0; i < noperands; i++)
2688 const char *p;
2689 int c;
2690 char *end;
2692 substed_operand[i] = recog_data.operand[i];
2693 p = constraints[i];
2695 modified[i] = RELOAD_READ;
2697 /* Scan this operand's constraint to see if it is an output operand,
2698 an in-out operand, is commutative, or should match another. */
2700 while ((c = *p))
2702 p += CONSTRAINT_LEN (c, p);
2703 switch (c)
2705 case '=':
2706 modified[i] = RELOAD_WRITE;
2707 break;
2708 case '+':
2709 modified[i] = RELOAD_READ_WRITE;
2710 break;
2711 case '%':
2713 /* The last operand should not be marked commutative. */
2714 gcc_assert (i != noperands - 1);
2716 /* We currently only support one commutative pair of
2717 operands. Some existing asm code currently uses more
2718 than one pair. Previously, that would usually work,
2719 but sometimes it would crash the compiler. We
2720 continue supporting that case as well as we can by
2721 silently ignoring all but the first pair. In the
2722 future we may handle it correctly. */
2723 if (commutative < 0)
2724 commutative = i;
2725 else
2726 gcc_assert (this_insn_is_asm);
2728 break;
2729 /* Use of ISDIGIT is tempting here, but it may get expensive because
2730 of locale support we don't want. */
2731 case '0': case '1': case '2': case '3': case '4':
2732 case '5': case '6': case '7': case '8': case '9':
2734 c = strtoul (p - 1, &end, 10);
2735 p = end;
2737 operands_match[c][i]
2738 = operands_match_p (recog_data.operand[c],
2739 recog_data.operand[i]);
2741 /* An operand may not match itself. */
2742 gcc_assert (c != i);
2744 /* If C can be commuted with C+1, and C might need to match I,
2745 then C+1 might also need to match I. */
2746 if (commutative >= 0)
2748 if (c == commutative || c == commutative + 1)
2750 int other = c + (c == commutative ? 1 : -1);
2751 operands_match[other][i]
2752 = operands_match_p (recog_data.operand[other],
2753 recog_data.operand[i]);
2755 if (i == commutative || i == commutative + 1)
2757 int other = i + (i == commutative ? 1 : -1);
2758 operands_match[c][other]
2759 = operands_match_p (recog_data.operand[c],
2760 recog_data.operand[other]);
2762 /* Note that C is supposed to be less than I.
2763 No need to consider altering both C and I because in
2764 that case we would alter one into the other. */
2771 /* Examine each operand that is a memory reference or memory address
2772 and reload parts of the addresses into index registers.
2773 Also here any references to pseudo regs that didn't get hard regs
2774 but are equivalent to constants get replaced in the insn itself
2775 with those constants. Nobody will ever see them again.
2777 Finally, set up the preferred classes of each operand. */
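/* For example, an operand (mem:SI (plus (reg 65) (const_int 4))) whose
   pseudo 65 got no hard register has its address handled by
   find_reloads_address, while a pseudo operand known to be equivalent
   to a constant is simply replaced by that constant here.  */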
2779 for (i = 0; i < noperands; i++)
2781 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2783 address_reloaded[i] = 0;
2784 address_operand_reloaded[i] = 0;
2785 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2786 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2787 : RELOAD_OTHER);
2788 address_type[i]
2789 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2790 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2791 : RELOAD_OTHER);
2793 if (*constraints[i] == 0)
2794 /* Ignore things like match_operator operands. */
2796 else if (constraints[i][0] == 'p'
2797 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2799 address_operand_reloaded[i]
2800 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2801 recog_data.operand[i],
2802 recog_data.operand_loc[i],
2803 i, operand_type[i], ind_levels, insn);
2805 /* If we now have a simple operand where we used to have a
2806 PLUS or MULT, re-recognize and try again. */
2807 if ((OBJECT_P (*recog_data.operand_loc[i])
2808 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2809 && (GET_CODE (recog_data.operand[i]) == MULT
2810 || GET_CODE (recog_data.operand[i]) == PLUS))
2812 INSN_CODE (insn) = -1;
2813 retval = find_reloads (insn, replace, ind_levels, live_known,
2814 reload_reg_p);
2815 return retval;
2818 recog_data.operand[i] = *recog_data.operand_loc[i];
2819 substed_operand[i] = recog_data.operand[i];
2821 /* Address operands are reloaded in their existing mode,
2822 no matter what is specified in the machine description. */
2823 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2825 /* If the address is a single CONST_INT, pick address mode
2826 instead; otherwise we will later not know in which mode
2827 the reload should be performed. */
2828 if (operand_mode[i] == VOIDmode)
2829 operand_mode[i] = Pmode;
2832 else if (code == MEM)
2834 address_reloaded[i]
2835 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2836 recog_data.operand_loc[i],
2837 XEXP (recog_data.operand[i], 0),
2838 &XEXP (recog_data.operand[i], 0),
2839 i, address_type[i], ind_levels, insn);
2840 recog_data.operand[i] = *recog_data.operand_loc[i];
2841 substed_operand[i] = recog_data.operand[i];
2843 else if (code == SUBREG)
2845 rtx reg = SUBREG_REG (recog_data.operand[i]);
2846 rtx op
2847 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2848 ind_levels,
2849 set != 0
2850 && &SET_DEST (set) == recog_data.operand_loc[i],
2851 insn,
2852 &address_reloaded[i]);
2854 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2855 that didn't get a hard register, emit a USE with a REG_EQUAL
2856 note in front so that we might inherit a previous, possibly
2857 wider reload. */
2859 if (replace
2860 && MEM_P (op)
2861 && REG_P (reg)
2862 && (GET_MODE_SIZE (GET_MODE (reg))
2863 >= GET_MODE_SIZE (GET_MODE (op)))
2864 && reg_equiv_constant (REGNO (reg)) == 0)
2865 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2866 insn),
2867 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2869 substed_operand[i] = recog_data.operand[i] = op;
2871 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2872 /* We can get a PLUS as an "operand" as a result of register
2873 elimination. See eliminate_regs and gen_reload. We handle
2874 a unary operator by reloading the operand. */
2875 substed_operand[i] = recog_data.operand[i]
2876 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2877 ind_levels, 0, insn,
2878 &address_reloaded[i]);
2879 else if (code == REG)
2881 /* This is equivalent to calling find_reloads_toplev.
2882 The code is duplicated for speed.
2883 When we find a pseudo always equivalent to a constant,
2884 we replace it by the constant. We must be sure, however,
2885 that we don't try to replace it in the insn in which it
2886 is being set. */
2887 int regno = REGNO (recog_data.operand[i]);
2888 if (reg_equiv_constant (regno) != 0
2889 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2891 /* Record the existing mode so that the check if constants are
2892 allowed will work when operand_mode isn't specified. */
2894 if (operand_mode[i] == VOIDmode)
2895 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2897 substed_operand[i] = recog_data.operand[i]
2898 = reg_equiv_constant (regno);
2900 if (reg_equiv_memory_loc (regno) != 0
2901 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2902 /* We need not give a valid is_set_dest argument since the case
2903 of a constant equivalence was checked above. */
2904 substed_operand[i] = recog_data.operand[i]
2905 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2906 ind_levels, 0, insn,
2907 &address_reloaded[i]);
2909 /* If the operand is still a register (we didn't replace it with an
2910 equivalent), get the preferred class to reload it into. */
2911 code = GET_CODE (recog_data.operand[i]);
2912 preferred_class[i]
2913 = ((code == REG && REGNO (recog_data.operand[i])
2914 >= FIRST_PSEUDO_REGISTER)
2915 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2916 : NO_REGS);
2917 pref_or_nothing[i]
2918 = (code == REG
2919 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2920 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2923 /* If this is simply a copy from operand 1 to operand 0, merge the
2924 preferred classes for the operands. */
2925 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2926 && recog_data.operand[1] == SET_SRC (set))
2928 preferred_class[0] = preferred_class[1]
2929 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2930 pref_or_nothing[0] |= pref_or_nothing[1];
2931 pref_or_nothing[1] |= pref_or_nothing[0];
2934 /* Now see what we need for pseudo-regs that didn't get hard regs
2935 or got the wrong kind of hard reg. For this, we must consider
2936 all the operands together against the register constraints. */
2938 best = MAX_RECOG_OPERANDS * 2 + 600;
2940 goal_alternative_swapped = 0;
2942 /* The constraints are made of several alternatives.
2943 Each operand's constraint looks like foo,bar,... with commas
2944 separating the alternatives. The first alternatives for all
2945 operands go together, the second alternatives go together, etc.
2947 First loop over alternatives. */
2949 for (this_alternative_number = 0;
2950 this_alternative_number < n_alternatives;
2951 this_alternative_number++)
2953 int swapped;
2955 if (!recog_data.alternative_enabled_p[this_alternative_number])
2957 int i;
2959 for (i = 0; i < recog_data.n_operands; i++)
2960 constraints[i] = skip_alternative (constraints[i]);
2962 continue;
2965 /* If insn is commutative (it's safe to exchange a certain pair
2966 of operands) then we need to try each alternative twice, the
2967 second time matching those two operands as if we had
2968 exchanged them. To do this, really exchange them in
2969 operands. */
2970 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
2972 /* Loop over operands for one constraint alternative. */
2973 /* LOSERS counts those that don't fit this alternative
2974 and would require loading. */
2975 int losers = 0;
2976 /* BAD is set to 1 if some operand can't fit this alternative
2977 even after reloading. */
2978 int bad = 0;
2979 /* REJECT is a count of how undesirable this alternative says it is
2980 if any reloading is required. If the alternative matches exactly
2981 then REJECT is ignored, but otherwise it gets this much
2982 counted against it in addition to the reloading needed. Each
2983 ? counts three times here since we want the disparaging caused by
2984 a bad register class to only count 1/3 as much. */
2985 int reject = 0;
2987 if (swapped)
2989 enum reg_class tclass;
2990 int t;
2992 recog_data.operand[commutative] = substed_operand[commutative + 1];
2993 recog_data.operand[commutative + 1] = substed_operand[commutative];
2994 /* Swap the duplicates too. */
2995 for (i = 0; i < recog_data.n_dups; i++)
2996 if (recog_data.dup_num[i] == commutative
2997 || recog_data.dup_num[i] == commutative + 1)
2998 *recog_data.dup_loc[i]
2999 = recog_data.operand[(int) recog_data.dup_num[i]];
3001 tclass = preferred_class[commutative];
3002 preferred_class[commutative] = preferred_class[commutative + 1];
3003 preferred_class[commutative + 1] = tclass;
3005 t = pref_or_nothing[commutative];
3006 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3007 pref_or_nothing[commutative + 1] = t;
3009 t = address_reloaded[commutative];
3010 address_reloaded[commutative] = address_reloaded[commutative + 1];
3011 address_reloaded[commutative + 1] = t;
3014 this_earlyclobber = 0;
3016 for (i = 0; i < noperands; i++)
3018 const char *p = constraints[i];
3019 char *end;
3020 int len;
3021 int win = 0;
3022 int did_match = 0;
3023 /* 0 => this operand can be reloaded somehow for this alternative. */
3024 int badop = 1;
3025 /* 0 => this operand can be reloaded if the alternative allows regs. */
3026 int winreg = 0;
3027 int c;
3028 int m;
3029 rtx operand = recog_data.operand[i];
3030 int offset = 0;
3031 /* Nonzero means this is a MEM that must be reloaded into a reg
3032 regardless of what the constraint says. */
3033 int force_reload = 0;
3034 int offmemok = 0;
3035 /* Nonzero if a constant forced into memory would be OK for this
3036 operand. */
3037 int constmemok = 0;
3038 int earlyclobber = 0;
3040 /* If the predicate accepts a unary operator, it means that
3041 we need to reload the operand, but do not do this for
3042 match_operator and friends. */
3043 if (UNARY_P (operand) && *p != 0)
3044 operand = XEXP (operand, 0);
3046 /* If the operand is a SUBREG, extract
3047 the REG or MEM (or maybe even a constant) within.
3048 (Constants can occur as a result of reg_equiv_constant.) */
3050 while (GET_CODE (operand) == SUBREG)
3052 /* Offset only matters when operand is a REG and
3053 it is a hard reg. This is because it is passed
3054 to reg_fits_class_p if it is a REG and all pseudos
3055 return 0 from that function. */
3056 if (REG_P (SUBREG_REG (operand))
3057 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3059 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3060 GET_MODE (SUBREG_REG (operand)),
3061 SUBREG_BYTE (operand),
3062 GET_MODE (operand)) < 0)
3063 force_reload = 1;
3064 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3065 GET_MODE (SUBREG_REG (operand)),
3066 SUBREG_BYTE (operand),
3067 GET_MODE (operand));
3069 operand = SUBREG_REG (operand);
3070 /* Force reload if this is a constant or PLUS or if there may
3071 be a problem accessing OPERAND in the outer mode. */
3072 if (CONSTANT_P (operand)
3073 || GET_CODE (operand) == PLUS
3074 /* We must force a reload of paradoxical SUBREGs
3075 of a MEM because the alignment of the inner value
3076 may not be enough to do the outer reference. On
3077 big-endian machines, it may also reference outside
3078 the object.
3080 On machines that extend byte operations and we have a
3081 SUBREG where both the inner and outer modes are no wider
3082 than a word and the inner mode is narrower, is integral,
3083 and gets extended when loaded from memory, combine.c has
3084 made assumptions about the behavior of the machine in such
3085 register access. If the data is, in fact, in memory we
3086 must always load using the size assumed to be in the
3087 register and let the insn do the different-sized
3088 accesses.
3090 This is doubly true if WORD_REGISTER_OPERATIONS. In
3091 this case eliminate_regs has left non-paradoxical
3092 subregs for push_reload to see. Make sure it does
3093 by forcing the reload.
3095 ??? When is it right at this stage to have a subreg
3096 of a mem that is _not_ to be handled specially? IMO
3097 those should have been reduced to just a mem. */
3098 || ((MEM_P (operand)
3099 || (REG_P (operand)
3100 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3101 #ifndef WORD_REGISTER_OPERATIONS
3102 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3103 < BIGGEST_ALIGNMENT)
3104 && (GET_MODE_SIZE (operand_mode[i])
3105 > GET_MODE_SIZE (GET_MODE (operand))))
3106 || BYTES_BIG_ENDIAN
3107 #ifdef LOAD_EXTEND_OP
3108 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3109 && (GET_MODE_SIZE (GET_MODE (operand))
3110 <= UNITS_PER_WORD)
3111 && (GET_MODE_SIZE (operand_mode[i])
3112 > GET_MODE_SIZE (GET_MODE (operand)))
3113 && INTEGRAL_MODE_P (GET_MODE (operand))
3114 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3115 #endif
3117 #endif
3120 force_reload = 1;
3123 this_alternative[i] = NO_REGS;
3124 this_alternative_win[i] = 0;
3125 this_alternative_match_win[i] = 0;
3126 this_alternative_offmemok[i] = 0;
3127 this_alternative_earlyclobber[i] = 0;
3128 this_alternative_matches[i] = -1;
3130 /* An empty constraint or empty alternative
3131 allows anything which matched the pattern. */
3132 if (*p == 0 || *p == ',')
3133 win = 1, badop = 0;
3135 /* Scan this alternative's specs for this operand;
3136 set WIN if the operand fits any letter in this alternative.
3137 Otherwise, clear BADOP if this operand could
3138 fit some letter after reloads,
3139 or set WINREG if this operand could fit after reloads
3140 provided the constraint allows some registers. */
3143 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3145 case '\0':
3146 len = 0;
3147 break;
3148 case ',':
3149 c = '\0';
3150 break;
3152 case '=': case '+': case '*':
3153 break;
3155 case '%':
3156 /* We only support one commutative marker, the first
3157 one. We already set commutative above. */
3158 break;
3160 case '?':
3161 reject += 6;
3162 break;
3164 case '!':
3165 reject = 600;
3166 break;
3168 case '#':
3169 /* Ignore rest of this alternative as far as
3170 reloading is concerned. */
3172 p++;
3173 while (*p && *p != ',');
3174 len = 0;
3175 break;
3177 case '0': case '1': case '2': case '3': case '4':
3178 case '5': case '6': case '7': case '8': case '9':
3179 m = strtoul (p, &end, 10);
3180 p = end;
3181 len = 0;
3183 this_alternative_matches[i] = m;
3184 /* We are supposed to match a previous operand.
3185 If we do, we win if that one did.
3186 If we do not, count both of the operands as losers.
3187 (This is too conservative, since most of the time
3188 only a single reload insn will be needed to make
3189 the two operands win. As a result, this alternative
3190 may be rejected when it is actually desirable.) */
3191 if ((swapped && (m != commutative || i != commutative + 1))
3192 /* If we are matching as if two operands were swapped,
3193 also pretend that operands_match had been computed
3194 with swapped.
3195 But if I is the second of those and C is the first,
3196 don't exchange them, because operands_match is valid
3197 only on one side of its diagonal. */
3198 ? (operands_match
3199 [(m == commutative || m == commutative + 1)
3200 ? 2 * commutative + 1 - m : m]
3201 [(i == commutative || i == commutative + 1)
3202 ? 2 * commutative + 1 - i : i])
3203 : operands_match[m][i])
3205 /* If we are matching a non-offsettable address where an
3206 offsettable address was expected, then we must reject
3207 this combination, because we can't reload it. */
3208 if (this_alternative_offmemok[m]
3209 && MEM_P (recog_data.operand[m])
3210 && this_alternative[m] == NO_REGS
3211 && ! this_alternative_win[m])
3212 bad = 1;
3214 did_match = this_alternative_win[m];
3216 else
3218 /* Operands don't match. */
3219 rtx value;
3220 int loc1, loc2;
3221 /* Retroactively mark the operand we had to match
3222 as a loser, if it wasn't already. */
3223 if (this_alternative_win[m])
3224 losers++;
3225 this_alternative_win[m] = 0;
3226 if (this_alternative[m] == NO_REGS)
3227 bad = 1;
3228 /* But count the pair only once in the total badness of
3229 this alternative, if the pair can be a dummy reload.
3230 The pointers in operand_loc are not swapped; swap
3231 them by hand if necessary. */
3232 if (swapped && i == commutative)
3233 loc1 = commutative + 1;
3234 else if (swapped && i == commutative + 1)
3235 loc1 = commutative;
3236 else
3237 loc1 = i;
3238 if (swapped && m == commutative)
3239 loc2 = commutative + 1;
3240 else if (swapped && m == commutative + 1)
3241 loc2 = commutative;
3242 else
3243 loc2 = m;
3244 value
3245 = find_dummy_reload (recog_data.operand[i],
3246 recog_data.operand[m],
3247 recog_data.operand_loc[loc1],
3248 recog_data.operand_loc[loc2],
3249 operand_mode[i], operand_mode[m],
3250 this_alternative[m], -1,
3251 this_alternative_earlyclobber[m]);
3253 if (value != 0)
3254 losers--;
3256 /* This can be fixed with reloads if the operand
3257 we are supposed to match can be fixed with reloads. */
3258 badop = 0;
3259 this_alternative[i] = this_alternative[m];
3261 /* If we have to reload this operand and some previous
3262 operand also had to match the same thing as this
3263 operand, we don't know how to do that. So reject this
3264 alternative. */
3265 if (! did_match || force_reload)
3266 for (j = 0; j < i; j++)
3267 if (this_alternative_matches[j]
3268 == this_alternative_matches[i])
3269 badop = 1;
3270 break;
3272 case 'p':
3273 /* All necessary reloads for an address_operand
3274 were handled in find_reloads_address. */
3275 this_alternative[i]
3276 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3277 ADDRESS, SCRATCH);
3278 win = 1;
3279 badop = 0;
3280 break;
3282 case TARGET_MEM_CONSTRAINT:
3283 if (force_reload)
3284 break;
3285 if (MEM_P (operand)
3286 || (REG_P (operand)
3287 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3288 && reg_renumber[REGNO (operand)] < 0))
3289 win = 1;
3290 if (CONST_POOL_OK_P (operand_mode[i], operand))
3291 badop = 0;
3292 constmemok = 1;
3293 break;
3295 case '<':
3296 if (MEM_P (operand)
3297 && ! address_reloaded[i]
3298 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3299 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3300 win = 1;
3301 break;
3303 case '>':
3304 if (MEM_P (operand)
3305 && ! address_reloaded[i]
3306 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3307 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3308 win = 1;
3309 break;
3311 /* Memory operand whose address is not offsettable. */
3312 case 'V':
3313 if (force_reload)
3314 break;
3315 if (MEM_P (operand)
3316 && ! (ind_levels ? offsettable_memref_p (operand)
3317 : offsettable_nonstrict_memref_p (operand))
3318 /* Certain mem addresses will become offsettable
3319 after they themselves are reloaded. This is important;
3320 we don't want our own handling of unoffsettables
3321 to override the handling of reg_equiv_address. */
3322 && !(REG_P (XEXP (operand, 0))
3323 && (ind_levels == 0
3324 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3325 win = 1;
3326 break;
3328 /* Memory operand whose address is offsettable. */
3329 case 'o':
3330 if (force_reload)
3331 break;
3332 if ((MEM_P (operand)
3333 /* If IND_LEVELS, find_reloads_address won't reload a
3334 pseudo that didn't get a hard reg, so we have to
3335 reject that case. */
3336 && ((ind_levels ? offsettable_memref_p (operand)
3337 : offsettable_nonstrict_memref_p (operand))
3338 /* A reloaded address is offsettable because it is now
3339 just a simple register indirect. */
3340 || address_reloaded[i] == 1))
3341 || (REG_P (operand)
3342 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3343 && reg_renumber[REGNO (operand)] < 0
3344 /* If reg_equiv_address is nonzero, we will be
3345 loading it into a register; hence it will be
3346 offsettable, but we cannot say that reg_equiv_mem
3347 is offsettable without checking. */
3348 && ((reg_equiv_mem (REGNO (operand)) != 0
3349 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3350 || (reg_equiv_address (REGNO (operand)) != 0))))
3351 win = 1;
3352 if (CONST_POOL_OK_P (operand_mode[i], operand)
3353 || MEM_P (operand))
3354 badop = 0;
3355 constmemok = 1;
3356 offmemok = 1;
3357 break;
3359 case '&':
3360 /* Output operand that is stored before the need for the
3361 input operands (and their index registers) is over. */
3362 earlyclobber = 1, this_earlyclobber = 1;
3363 break;
3365 case 'E':
3366 case 'F':
3367 if (GET_CODE (operand) == CONST_DOUBLE
3368 || (GET_CODE (operand) == CONST_VECTOR
3369 && (GET_MODE_CLASS (GET_MODE (operand))
3370 == MODE_VECTOR_FLOAT)))
3371 win = 1;
3372 break;
3374 case 'G':
3375 case 'H':
3376 if (GET_CODE (operand) == CONST_DOUBLE
3377 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3378 win = 1;
3379 break;
3381 case 's':
3382 if (CONST_INT_P (operand)
3383 || (GET_CODE (operand) == CONST_DOUBLE
3384 && GET_MODE (operand) == VOIDmode))
3385 break;
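/* A numeric constant fails 's'; anything else falls through to
   the 'i' test below, so 's' accepts exactly the non-numeric
   constants that 'i' accepts.  */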
3386 case 'i':
3387 if (CONSTANT_P (operand)
3388 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3389 win = 1;
3390 break;
3392 case 'n':
3393 if (CONST_INT_P (operand)
3394 || (GET_CODE (operand) == CONST_DOUBLE
3395 && GET_MODE (operand) == VOIDmode))
3396 win = 1;
3397 break;
3399 case 'I':
3400 case 'J':
3401 case 'K':
3402 case 'L':
3403 case 'M':
3404 case 'N':
3405 case 'O':
3406 case 'P':
3407 if (CONST_INT_P (operand)
3408 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3409 win = 1;
3410 break;
3412 case 'X':
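/* 'X' accepts absolutely anything, so also cancel any
   force_reload requested for this operand.  */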
3413 force_reload = 0;
3414 win = 1;
3415 break;
3417 case 'g':
3418 if (! force_reload
3419 /* A PLUS is never a valid operand, but reload can make
3420 it from a register when eliminating registers. */
3421 && GET_CODE (operand) != PLUS
3422 /* A SCRATCH is not a valid operand. */
3423 && GET_CODE (operand) != SCRATCH
3424 && (! CONSTANT_P (operand)
3425 || ! flag_pic
3426 || LEGITIMATE_PIC_OPERAND_P (operand))
3427 && (GENERAL_REGS == ALL_REGS
3428 || !REG_P (operand)
3429 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3430 && reg_renumber[REGNO (operand)] < 0)))
3431 win = 1;
3432 /* Drop through into 'r' case. */
3434 case 'r':
3435 this_alternative[i]
3436 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3437 goto reg;
3439 default:
3440 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3442 #ifdef EXTRA_CONSTRAINT_STR
3443 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3445 if (force_reload)
3446 break;
3447 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3448 win = 1;
3449 /* If the address was already reloaded,
3450 we win as well. */
3451 else if (MEM_P (operand)
3452 && address_reloaded[i] == 1)
3453 win = 1;
3454 /* Likewise if the address will be reloaded because
3455 reg_equiv_address is nonzero. For reg_equiv_mem
3456 we have to check. */
3457 else if (REG_P (operand)
3458 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3459 && reg_renumber[REGNO (operand)] < 0
3460 && ((reg_equiv_mem (REGNO (operand)) != 0
3461 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3462 || (reg_equiv_address (REGNO (operand)) != 0)))
3463 win = 1;
3465 /* If we didn't already win, we can reload
3466 constants via force_const_mem, and other
3467 MEMs by reloading the address like for 'o'. */
3468 if (CONST_POOL_OK_P (operand_mode[i], operand)
3469 || MEM_P (operand))
3470 badop = 0;
3471 constmemok = 1;
3472 offmemok = 1;
3473 break;
3475 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3477 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3478 win = 1;
3480 /* If we didn't already win, we can reload
3481 the address into a base register. */
3482 this_alternative[i]
3483 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3484 ADDRESS, SCRATCH);
3485 badop = 0;
3486 break;
3489 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3490 win = 1;
3491 #endif
3492 break;
3495 this_alternative[i]
3496 = (reg_class_subunion
3497 [this_alternative[i]]
3498 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3499 reg:
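/* Common tail for register constraints: record that the
   constraint allows registers (WINREG), except for BLKmode
   operands, which can never live in a register, and set WIN if
   the operand already is a register of the required class.  */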
3500 if (GET_MODE (operand) == BLKmode)
3501 break;
3502 winreg = 1;
3503 if (REG_P (operand)
3504 && reg_fits_class_p (operand, this_alternative[i],
3505 offset, GET_MODE (recog_data.operand[i])))
3506 win = 1;
3507 break;
3509 while ((p += len), c);
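/* Advance this operand's saved constraint pointer past the
   alternative just scanned, but only on one of the scans (the
   swapped scan when the insn has a commutative pair, the only
   scan otherwise), so the pointer moves exactly once per
   alternative.  */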
3511 if (swapped == (commutative >= 0 ? 1 : 0))
3512 constraints[i] = p;
3514 /* If this operand could be handled with a reg,
3515 and some reg is allowed, then this operand can be handled. */
3516 if (winreg && this_alternative[i] != NO_REGS
3517 && (win || !class_only_fixed_regs[this_alternative[i]]))
3518 badop = 0;
3520 /* Record which operands fit this alternative. */
3521 this_alternative_earlyclobber[i] = earlyclobber;
3522 if (win && ! force_reload)
3523 this_alternative_win[i] = 1;
3524 else if (did_match && ! force_reload)
3525 this_alternative_match_win[i] = 1;
3526 else
3528 int const_to_mem = 0;
3530 this_alternative_offmemok[i] = offmemok;
3531 losers++;
3532 if (badop)
3533 bad = 1;
3534 /* Alternative loses if it has no regs for a reg operand. */
3535 if (REG_P (operand)
3536 && this_alternative[i] == NO_REGS
3537 && this_alternative_matches[i] < 0)
3538 bad = 1;
3540 /* If this is a constant that is reloaded into the desired
3541 class by copying it to memory first, count that as another
3542 reload. This is consistent with other code and is
3543 required to avoid choosing another alternative when
3544 the constant is moved into memory by this function on
3545 an early reload pass. Note that the test here is
3546 precisely the same as in the code below that calls
3547 force_const_mem. */
3548 if (CONST_POOL_OK_P (operand_mode[i], operand)
3549 && ((targetm.preferred_reload_class (operand,
3550 this_alternative[i])
3551 == NO_REGS)
3552 || no_input_reloads))
3554 const_to_mem = 1;
3555 if (this_alternative[i] != NO_REGS)
3556 losers++;
3559 /* Alternative loses if it requires a type of reload not
3560 permitted for this insn. We can always reload SCRATCH
3561 and objects with a REG_UNUSED note. */
3562 if (GET_CODE (operand) != SCRATCH
3563 && modified[i] != RELOAD_READ && no_output_reloads
3564 && ! find_reg_note (insn, REG_UNUSED, operand))
3565 bad = 1;
3566 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3567 && ! const_to_mem)
3568 bad = 1;
3570 /* If we can't reload this value at all, reject this
3571 alternative. Note that we could also lose due to
3572 LIMIT_RELOAD_CLASS, but we don't check that
3573 here. */
3575 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3577 if (targetm.preferred_reload_class (operand,
3578 this_alternative[i])
3579 == NO_REGS)
3580 reject = 600;
3582 if (operand_type[i] == RELOAD_FOR_OUTPUT
3583 && (targetm.preferred_output_reload_class (operand,
3584 this_alternative[i])
3585 == NO_REGS))
3586 reject = 600;
3589 /* We prefer to reload pseudos over reloading other things,
3590 since such reloads may be able to be eliminated later.
3591 If we are reloading a SCRATCH, we won't be generating any
3592 insns, just using a register, so it is also preferred.
3593 So bump REJECT in other cases. Don't do this in the
3594 case where we are forcing a constant into memory and
3595 it will then win since we don't want to have a different
3596 alternative match then. */
3597 if (! (REG_P (operand)
3598 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3599 && GET_CODE (operand) != SCRATCH
3600 && ! (const_to_mem && constmemok))
3601 reject += 2;
3603 /* Input reloads can be inherited more often than output
3604 reloads can be removed, so penalize output reloads. */
3605 if (operand_type[i] != RELOAD_FOR_INPUT
3606 && GET_CODE (operand) != SCRATCH)
3607 reject++;
3610 /* If this operand is a pseudo register that didn't get
3611 a hard reg and this alternative accepts some
3612 register, see if the class that we want is a subset
3613 of the preferred class for this register. If not,
3614 but it intersects that class, use the preferred class
3615 instead. If it does not intersect the preferred
3616 class, show that usage of this alternative should be
3617 discouraged; it will be discouraged more still if the
3618 register is `preferred or nothing'. We do this
3619 because it increases the chance of reusing our spill
3620 register in a later insn and avoiding a pair of
3621 memory stores and loads.
3623 Don't bother with this if this alternative will
3624 accept this operand.
3626 Don't do this for a multiword operand, since it is
3627 only a small win and has the risk of requiring more
3628 spill registers, which could cause a large loss.
3630 Don't do this if the preferred class has only one
3631 register because we might otherwise exhaust the
3632 class. */
3634 if (! win && ! did_match
3635 && this_alternative[i] != NO_REGS
3636 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3637 && reg_class_size [(int) preferred_class[i]] > 0
3638 && ! small_register_class_p (preferred_class[i]))
3640 if (! reg_class_subset_p (this_alternative[i],
3641 preferred_class[i]))
3643 /* Since we don't have a way of forming the intersection,
3644 we just do something special if the preferred class
3645 is a subset of the class we have; that's the most
3646 common case anyway. */
3647 if (reg_class_subset_p (preferred_class[i],
3648 this_alternative[i]))
3649 this_alternative[i] = preferred_class[i];
3650 else
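/* Neither class contains the other; discourage this
   alternative, twice as much if the register is `preferred
   or nothing'.  */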
3651 reject += (2 + 2 * pref_or_nothing[i]);
3656 /* Now see if any output operands that are marked "earlyclobber"
3657 in this alternative conflict with any input operands
3658 or any memory addresses. */
3660 for (i = 0; i < noperands; i++)
3661 if (this_alternative_earlyclobber[i]
3662 && (this_alternative_win[i] || this_alternative_match_win[i]))
3664 struct decomposition early_data;
3666 early_data = decompose (recog_data.operand[i]);
3668 gcc_assert (modified[i] != RELOAD_READ);
3670 if (this_alternative[i] == NO_REGS)
3672 this_alternative_earlyclobber[i] = 0;
3673 gcc_assert (this_insn_is_asm);
3674 error_for_asm (this_insn,
3675 "%<&%> constraint used with no register class");
3678 for (j = 0; j < noperands; j++)
3679 /* Is this an input operand or a memory ref? */
3680 if ((MEM_P (recog_data.operand[j])
3681 || modified[j] != RELOAD_WRITE)
3682 && j != i
3683 /* Ignore things like match_operator operands. */
3684 && !recog_data.is_operator[j]
3685 /* Don't count an input operand that is constrained to match
3686 the early clobber operand. */
3687 && ! (this_alternative_matches[j] == i
3688 && rtx_equal_p (recog_data.operand[i],
3689 recog_data.operand[j]))
3690 /* Is it altered by storing the earlyclobber operand? */
3691 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3692 early_data))
3694 /* If the output is in a non-empty few-regs class,
3695 it's costly to reload it, so reload the input instead. */
3696 if (small_register_class_p (this_alternative[i])
3697 && (REG_P (recog_data.operand[j])
3698 || GET_CODE (recog_data.operand[j]) == SUBREG))
3700 losers++;
3701 this_alternative_win[j] = 0;
3702 this_alternative_match_win[j] = 0;
3704 else
3705 break;
3707 /* If an earlyclobber operand conflicts with something,
3708 it must be reloaded, so request this and count the cost. */
3709 if (j != noperands)
3711 losers++;
3712 this_alternative_win[i] = 0;
3713 this_alternative_match_win[j] = 0;
3714 for (j = 0; j < noperands; j++)
3715 if (this_alternative_matches[j] == i
3716 && this_alternative_match_win[j])
3718 this_alternative_win[j] = 0;
3719 this_alternative_match_win[j] = 0;
3720 losers++;
3725 /* If one alternative accepts all the operands, no reload required,
3726 choose that alternative; don't consider the remaining ones. */
3727 if (losers == 0)
3729 /* Unswap these so that they are never swapped at `finish'. */
3730 if (swapped)
3732 recog_data.operand[commutative] = substed_operand[commutative];
3733 recog_data.operand[commutative + 1]
3734 = substed_operand[commutative + 1];
3736 for (i = 0; i < noperands; i++)
3738 goal_alternative_win[i] = this_alternative_win[i];
3739 goal_alternative_match_win[i] = this_alternative_match_win[i];
3740 goal_alternative[i] = this_alternative[i];
3741 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3742 goal_alternative_matches[i] = this_alternative_matches[i];
3743 goal_alternative_earlyclobber[i]
3744 = this_alternative_earlyclobber[i];
3746 goal_alternative_number = this_alternative_number;
3747 goal_alternative_swapped = swapped;
3748 goal_earlyclobber = this_earlyclobber;
3749 goto finish;
3752 /* REJECT, set by the ! and ? constraint characters and when a register
3753 would be reloaded into a non-preferred class, discourages the use of
3754 this alternative for a reload goal. REJECT is incremented by six
3755 for each ? and two for each non-preferred class. */
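/* For example, one reload plus one '?' scores 1 * 6 + 6 = 12,
   exactly as bad as two reloads and no '?'.  */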
3756 losers = losers * 6 + reject;
3758 /* If this alternative can be made to work by reloading,
3759 and it needs less reloading than the others checked so far,
3760 record it as the chosen goal for reloading. */
3761 if (! bad)
3763 if (best > losers)
3765 for (i = 0; i < noperands; i++)
3767 goal_alternative[i] = this_alternative[i];
3768 goal_alternative_win[i] = this_alternative_win[i];
3769 goal_alternative_match_win[i]
3770 = this_alternative_match_win[i];
3771 goal_alternative_offmemok[i]
3772 = this_alternative_offmemok[i];
3773 goal_alternative_matches[i] = this_alternative_matches[i];
3774 goal_alternative_earlyclobber[i]
3775 = this_alternative_earlyclobber[i];
3777 goal_alternative_swapped = swapped;
3778 best = losers;
3779 goal_alternative_number = this_alternative_number;
3780 goal_earlyclobber = this_earlyclobber;
3784 if (swapped)
3786 enum reg_class tclass;
3787 int t;
3789 /* If the commutative operands have been swapped, swap
3790 them back in order to check the next alternative. */
3791 recog_data.operand[commutative] = substed_operand[commutative];
3792 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3793 /* Unswap the duplicates too. */
3794 for (i = 0; i < recog_data.n_dups; i++)
3795 if (recog_data.dup_num[i] == commutative
3796 || recog_data.dup_num[i] == commutative + 1)
3797 *recog_data.dup_loc[i]
3798 = recog_data.operand[(int) recog_data.dup_num[i]];
3800 /* Unswap the operand related information as well. */
3801 tclass = preferred_class[commutative];
3802 preferred_class[commutative] = preferred_class[commutative + 1];
3803 preferred_class[commutative + 1] = tclass;
3805 t = pref_or_nothing[commutative];
3806 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3807 pref_or_nothing[commutative + 1] = t;
3809 t = address_reloaded[commutative];
3810 address_reloaded[commutative] = address_reloaded[commutative + 1];
3811 address_reloaded[commutative + 1] = t;
3816 /* The operands don't meet the constraints.
3817 goal_alternative describes the alternative
3818 that we could reach by reloading the fewest operands.
3819 Reload so as to fit it. */
3821 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3823 /* No alternative works with reloads?? */
3824 if (insn_code_number >= 0)
3825 fatal_insn ("unable to generate reloads for:", insn);
3826 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3827 /* Avoid further trouble with this insn. */
3828 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3829 n_reloads = 0;
3830 return 0;
3833 /* Jump to `finish' from above if all operands are valid already.
3834 In that case, goal_alternative_win is all 1. */
3835 finish:
3837 /* Right now, for any pair of operands I and J that are required to match,
3838 with I < J,
3839 goal_alternative_matches[J] is I.
3840 Set up goal_alternative_matched as the inverse function:
3841 goal_alternative_matched[I] = J. */
3843 for (i = 0; i < noperands; i++)
3844 goal_alternative_matched[i] = -1;
3846 for (i = 0; i < noperands; i++)
3847 if (! goal_alternative_win[i]
3848 && goal_alternative_matches[i] >= 0)
3849 goal_alternative_matched[goal_alternative_matches[i]] = i;
3851 for (i = 0; i < noperands; i++)
3852 goal_alternative_win[i] |= goal_alternative_match_win[i];
3854 /* If the best alternative is with operands 1 and 2 swapped,
3855 consider them swapped before reporting the reloads. Update the
3856 operand numbers of any reloads already pushed. */
3858 if (goal_alternative_swapped)
3860 rtx tem;
3862 tem = substed_operand[commutative];
3863 substed_operand[commutative] = substed_operand[commutative + 1];
3864 substed_operand[commutative + 1] = tem;
3865 tem = recog_data.operand[commutative];
3866 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3867 recog_data.operand[commutative + 1] = tem;
3868 tem = *recog_data.operand_loc[commutative];
3869 *recog_data.operand_loc[commutative]
3870 = *recog_data.operand_loc[commutative + 1];
3871 *recog_data.operand_loc[commutative + 1] = tem;
3873 for (i = 0; i < n_reloads; i++)
3875 if (rld[i].opnum == commutative)
3876 rld[i].opnum = commutative + 1;
3877 else if (rld[i].opnum == commutative + 1)
3878 rld[i].opnum = commutative;
3882 for (i = 0; i < noperands; i++)
3884 operand_reloadnum[i] = -1;
3886 /* If this is an earlyclobber operand, we need to widen the scope.
3887 The reload must remain valid from the start of the insn being
3888 reloaded until after the operand is stored into its destination.
3889 We approximate this with RELOAD_OTHER even though we know that we
3890 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3892 One special case that is worth checking is when we have an
3893 output that is earlyclobber but isn't used past the insn (typically
3894 a SCRATCH). In this case, we only need have the reload live
3895 through the insn itself, but not for any of our input or output
3896 reloads.
3897 But we must not accidentally narrow the scope of an existing
3898 RELOAD_OTHER reload - leave these alone.
3900 In any case, anything needed to address this operand can remain
3901 however it was previously categorized. */
3903 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3904 operand_type[i]
3905 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3906 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3909 /* Any constants that aren't allowed and can't be reloaded
3910 into registers are here changed into memory references. */
3911 for (i = 0; i < noperands; i++)
3912 if (! goal_alternative_win[i])
3914 rtx op = recog_data.operand[i];
3915 rtx subreg = NULL_RTX;
3916 rtx plus = NULL_RTX;
3917 enum machine_mode mode = operand_mode[i];
3919 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3920 push_reload so we have to let them pass here. */
3921 if (GET_CODE (op) == SUBREG)
3923 subreg = op;
3924 op = SUBREG_REG (op);
3925 mode = GET_MODE (op);
3928 if (GET_CODE (op) == PLUS)
3930 plus = op;
3931 op = XEXP (op, 1);
3934 if (CONST_POOL_OK_P (mode, op)
3935 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3936 == NO_REGS)
3937 || no_input_reloads))
3939 int this_address_reloaded;
3940 rtx tem = force_const_mem (mode, op);
3942 /* If we stripped a SUBREG or a PLUS above add it back. */
3943 if (plus != NULL_RTX)
3944 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3946 if (subreg != NULL_RTX)
3947 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3949 this_address_reloaded = 0;
3950 substed_operand[i] = recog_data.operand[i]
3951 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3952 0, insn, &this_address_reloaded);
3954 /* If the alternative accepts constant pool refs directly
3955 there will be no reload needed at all. */
3956 if (plus == NULL_RTX
3957 && subreg == NULL_RTX
3958 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3959 ? substed_operand[i]
3960 : NULL,
3961 recog_data.constraints[i],
3962 goal_alternative_number))
3963 goal_alternative_win[i] = 1;
3967 /* Record the values of the earlyclobber operands for the caller. */
3968 if (goal_earlyclobber)
3969 for (i = 0; i < noperands; i++)
3970 if (goal_alternative_earlyclobber[i])
3971 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3973 /* Now record reloads for all the operands that need them. */
3974 for (i = 0; i < noperands; i++)
3975 if (! goal_alternative_win[i])
3977 /* Operands that match previous ones have already been handled. */
3978 if (goal_alternative_matches[i] >= 0)
3979 ;
3980 /* Handle an operand with a nonoffsettable address
3981 appearing where an offsettable address will do
3982 by reloading the address into a base register.
3984 ??? We can also do this when the operand is a register and
3985 reg_equiv_mem is not offsettable, but this is a bit tricky,
3986 so we don't bother with it. It may not be worth doing. */
3987 else if (goal_alternative_matched[i] == -1
3988 && goal_alternative_offmemok[i]
3989 && MEM_P (recog_data.operand[i]))
3991 /* If the address to be reloaded is a VOIDmode constant,
3992 use the default address mode as mode of the reload register,
3993 as would have been done by find_reloads_address. */
3994 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3995 enum machine_mode address_mode;
3996 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3997 if (address_mode == VOIDmode)
3998 address_mode = targetm.addr_space.address_mode (as);
4000 operand_reloadnum[i]
4001 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4002 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4003 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4004 address_mode,
4005 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4006 rld[operand_reloadnum[i]].inc
4007 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4009 /* If this operand is an output, we will have made any
4010 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4011 now we are treating part of the operand as an input, so
4012 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4014 if (modified[i] == RELOAD_WRITE)
4016 for (j = 0; j < n_reloads; j++)
4018 if (rld[j].opnum == i)
4020 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4021 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4022 else if (rld[j].when_needed
4023 == RELOAD_FOR_OUTADDR_ADDRESS)
4024 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4029 else if (goal_alternative_matched[i] == -1)
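/* An ordinary operand with no matching constraint: push a reload
   for its input side, its output side, or both, as MODIFIED[i]
   dictates, using the register class this alternative selected.  */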
4031 operand_reloadnum[i]
4032 = push_reload ((modified[i] != RELOAD_WRITE
4033 ? recog_data.operand[i] : 0),
4034 (modified[i] != RELOAD_READ
4035 ? recog_data.operand[i] : 0),
4036 (modified[i] != RELOAD_WRITE
4037 ? recog_data.operand_loc[i] : 0),
4038 (modified[i] != RELOAD_READ
4039 ? recog_data.operand_loc[i] : 0),
4040 (enum reg_class) goal_alternative[i],
4041 (modified[i] == RELOAD_WRITE
4042 ? VOIDmode : operand_mode[i]),
4043 (modified[i] == RELOAD_READ
4044 ? VOIDmode : operand_mode[i]),
4045 (insn_code_number < 0 ? 0
4046 : insn_data[insn_code_number].operand[i].strict_low),
4047 0, i, operand_type[i]);
4049 /* In a matching pair of operands, one must be input only
4050 and the other must be output only.
4051 Pass the input operand as IN and the other as OUT. */
4052 else if (modified[i] == RELOAD_READ
4053 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4055 operand_reloadnum[i]
4056 = push_reload (recog_data.operand[i],
4057 recog_data.operand[goal_alternative_matched[i]],
4058 recog_data.operand_loc[i],
4059 recog_data.operand_loc[goal_alternative_matched[i]],
4060 (enum reg_class) goal_alternative[i],
4061 operand_mode[i],
4062 operand_mode[goal_alternative_matched[i]],
4063 0, 0, i, RELOAD_OTHER);
4064 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4066 else if (modified[i] == RELOAD_WRITE
4067 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4069 operand_reloadnum[goal_alternative_matched[i]]
4070 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4071 recog_data.operand[i],
4072 recog_data.operand_loc[goal_alternative_matched[i]],
4073 recog_data.operand_loc[i],
4074 (enum reg_class) goal_alternative[i],
4075 operand_mode[goal_alternative_matched[i]],
4076 operand_mode[i],
4077 0, 0, i, RELOAD_OTHER);
4078 operand_reloadnum[i] = output_reloadnum;
4080 else
4082 gcc_assert (insn_code_number < 0);
4083 error_for_asm (insn, "inconsistent operand constraints "
4084 "in an %<asm%>");
4085 /* Avoid further trouble with this insn. */
4086 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4087 n_reloads = 0;
4088 return 0;
4091 else if (goal_alternative_matched[i] < 0
4092 && goal_alternative_matches[i] < 0
4093 && address_operand_reloaded[i] != 1
4094 && optimize)
4096 /* For each non-matching operand that's a MEM or a pseudo-register
4097 that didn't get a hard register, make an optional reload.
4098 This may get done even if the insn needs no reloads otherwise. */
4100 rtx operand = recog_data.operand[i];
4102 while (GET_CODE (operand) == SUBREG)
4103 operand = SUBREG_REG (operand);
4104 if ((MEM_P (operand)
4105 || (REG_P (operand)
4106 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4107 /* If this is only for an output, the optional reload would not
4108 actually cause us to use a register now, just note that
4109 something is stored here. */
4110 && (goal_alternative[i] != NO_REGS
4111 || modified[i] == RELOAD_WRITE)
4112 && ! no_input_reloads
4113 /* An optional output reload might make it possible to delete INSN
4114 later.  We must not make in-out reloads on insns for which output
4115 reloads are not permitted.
4116 If this is an asm, we can't delete it; we must not even call
4117 push_reload for an optional output reload in this case,
4118 because we can't be sure that the constraint allows a register,
4119 and push_reload verifies the constraints for asms. */
4120 && (modified[i] == RELOAD_READ
4121 || (! no_output_reloads && ! this_insn_is_asm)))
4122 operand_reloadnum[i]
4123 = push_reload ((modified[i] != RELOAD_WRITE
4124 ? recog_data.operand[i] : 0),
4125 (modified[i] != RELOAD_READ
4126 ? recog_data.operand[i] : 0),
4127 (modified[i] != RELOAD_WRITE
4128 ? recog_data.operand_loc[i] : 0),
4129 (modified[i] != RELOAD_READ
4130 ? recog_data.operand_loc[i] : 0),
4131 (enum reg_class) goal_alternative[i],
4132 (modified[i] == RELOAD_WRITE
4133 ? VOIDmode : operand_mode[i]),
4134 (modified[i] == RELOAD_READ
4135 ? VOIDmode : operand_mode[i]),
4136 (insn_code_number < 0 ? 0
4137 : insn_data[insn_code_number].operand[i].strict_low),
4138 1, i, operand_type[i]);
4139 /* If a memory reference remains (either as a MEM or a pseudo that
4140 did not get a hard register), yet we can't make an optional
4141 reload, check if this is actually a pseudo register reference;
4142 we then need to emit a USE and/or a CLOBBER so that reload
4143 inheritance will do the right thing. */
4144 else if (replace
4145 && (MEM_P (operand)
4146 || (REG_P (operand)
4147 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4148 && reg_renumber [REGNO (operand)] < 0)))
4150 operand = *recog_data.operand_loc[i];
4152 while (GET_CODE (operand) == SUBREG)
4153 operand = SUBREG_REG (operand);
4154 if (REG_P (operand))
4156 if (modified[i] != RELOAD_WRITE)
4157 /* We mark the USE with QImode so that we recognize
4158 it as one that can be safely deleted at the end
4159 of reload. */
4160 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4161 insn), QImode);
4162 if (modified[i] != RELOAD_READ)
4163 emit_insn_after (gen_clobber (operand), insn);
4167 else if (goal_alternative_matches[i] >= 0
4168 && goal_alternative_win[goal_alternative_matches[i]]
4169 && modified[i] == RELOAD_READ
4170 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4171 && ! no_input_reloads && ! no_output_reloads
4172 && optimize)
4174 /* Similarly, make an optional reload for a pair of matching
4175 objects that are in MEM or a pseudo that didn't get a hard reg. */
4177 rtx operand = recog_data.operand[i];
4179 while (GET_CODE (operand) == SUBREG)
4180 operand = SUBREG_REG (operand);
4181 if ((MEM_P (operand)
4182 || (REG_P (operand)
4183 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4184 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4185 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4186 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4187 recog_data.operand[i],
4188 recog_data.operand_loc[goal_alternative_matches[i]],
4189 recog_data.operand_loc[i],
4190 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4191 operand_mode[goal_alternative_matches[i]],
4192 operand_mode[i],
4193 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4196 /* Perform whatever substitutions on the operands we are supposed
4197 to make due to commutativity or replacement of registers
4198 with equivalent constants or memory slots. */
4200 for (i = 0; i < noperands; i++)
4202 /* We only do this on the last pass through reload, because it is
4203 possible for some data (like reg_equiv_address) to be changed during
4204 later passes. Moreover, we lose the opportunity to get a useful
4205 reload_{in,out}_reg when we do these replacements. */
4207 if (replace)
4209 rtx substitution = substed_operand[i];
4211 *recog_data.operand_loc[i] = substitution;
4213 /* If we're replacing an operand with a LABEL_REF, we need to
4214 make sure that there's a REG_LABEL_OPERAND note attached to
4215 this instruction. */
4216 if (GET_CODE (substitution) == LABEL_REF
4217 && !find_reg_note (insn, REG_LABEL_OPERAND,
4218 XEXP (substitution, 0))
4219 /* For a JUMP_P, if it was a branch target it must have
4220 already been recorded as such. */
4221 && (!JUMP_P (insn)
4222 || !label_is_jump_target_p (XEXP (substitution, 0),
4223 insn)))
4225 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4226 if (LABEL_P (XEXP (substitution, 0)))
4227 ++LABEL_NUSES (XEXP (substitution, 0));
4231 else
4232 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4235 /* If this insn pattern contains any MATCH_DUP's, make sure that
4236 they will be substituted if the operands they match are substituted.
4237 Also do now any substitutions we already did on the operands.
4239 Don't do this if we aren't making replacements because we might be
4240 propagating things allocated by frame pointer elimination into places
4241 it doesn't expect. */
4243 if (insn_code_number >= 0 && replace)
4244 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4246 int opno = recog_data.dup_num[i];
4247 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4248 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4251 #if 0
4252 /* This loses because reloading of prior insns can invalidate the equivalence
4253 (or at least find_equiv_reg isn't smart enough to find it any more),
4254 causing this insn to need more reload regs than it needed before.
4255 It may be too late to make the reload regs available.
4256 Now this optimization is done safely in choose_reload_regs. */
4258 /* For each reload of a reg into some other class of reg,
4259 search for an existing equivalent reg (same value now) in the right class.
4260 We can use it as long as we don't need to change its contents. */
4261 for (i = 0; i < n_reloads; i++)
4262 if (rld[i].reg_rtx == 0
4263 && rld[i].in != 0
4264 && REG_P (rld[i].in)
4265 && rld[i].out == 0)
4267 rld[i].reg_rtx
4268 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4269 static_reload_reg_p, 0, rld[i].inmode);
4270 /* Prevent generation of insn to load the value
4271 because the one we found already has the value. */
4272 if (rld[i].reg_rtx)
4273 rld[i].in = rld[i].reg_rtx;
4275 #endif
4277 /* If we detected error and replaced asm instruction by USE, forget about the
4278 reloads. */
4279 if (GET_CODE (PATTERN (insn)) == USE
4280 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4281 n_reloads = 0;
4283 /* Perhaps an output reload can be combined with another
4284 to reduce needs by one. */
4285 if (!goal_earlyclobber)
4286 combine_reloads ();
4288 /* If we have a pair of reloads for parts of an address, they are reloading
4289 the same object, the operands themselves were not reloaded, and they
4290 are for two operands that are supposed to match, merge the reloads and
4291 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4293 for (i = 0; i < n_reloads; i++)
4295 int k;
4297 for (j = i + 1; j < n_reloads; j++)
4298 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4299 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4300 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4301 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4302 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4303 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4304 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4305 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4306 && rtx_equal_p (rld[i].in, rld[j].in)
4307 && (operand_reloadnum[rld[i].opnum] < 0
4308 || rld[operand_reloadnum[rld[i].opnum]].optional)
4309 && (operand_reloadnum[rld[j].opnum] < 0
4310 || rld[operand_reloadnum[rld[j].opnum]].optional)
4311 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4312 || (goal_alternative_matches[rld[j].opnum]
4313 == rld[i].opnum)))
4315 for (k = 0; k < n_replacements; k++)
4316 if (replacements[k].what == j)
4317 replacements[k].what = i;
4319 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4320 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4321 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4322 else
4323 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
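/* Reload J's work is now done by reload I; clear its input so it
   no longer requests anything.  */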
4324 rld[j].in = 0;
4328 /* Scan all the reloads and update their type.
4329 If a reload is for the address of an operand and we didn't reload
4330 that operand, change the type. Similarly, change the operand number
4331 of a reload when two operands match. If a reload is optional, treat it
4332 as though the operand isn't reloaded.
4334 ??? This latter case is somewhat odd because if we do the optional
4335 reload, it means the object is hanging around. Thus we need only
4336 do the address reload if the optional reload was NOT done.
4338 Change secondary reloads to be the address type of their operand, not
4339 the normal type.
4341 If an operand's reload is now RELOAD_OTHER, change any
4342 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4343 RELOAD_FOR_OTHER_ADDRESS. */
4345 for (i = 0; i < n_reloads; i++)
4347 if (rld[i].secondary_p
4348 && rld[i].when_needed == operand_type[rld[i].opnum])
4349 rld[i].when_needed = address_type[rld[i].opnum];
4351 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4352 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4353 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4354 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4355 && (operand_reloadnum[rld[i].opnum] < 0
4356 || rld[operand_reloadnum[rld[i].opnum]].optional))
4358 /* If we have a secondary reload to go along with this reload,
4359 change its type to RELOAD_FOR_OPADDR_ADDR. */
4361 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4362 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4363 && rld[i].secondary_in_reload != -1)
4365 int secondary_in_reload = rld[i].secondary_in_reload;
4367 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4369 /* If there's a tertiary reload we have to change it also. */
4370 if (secondary_in_reload > 0
4371 && rld[secondary_in_reload].secondary_in_reload != -1)
4372 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4373 = RELOAD_FOR_OPADDR_ADDR;
4376 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4377 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4378 && rld[i].secondary_out_reload != -1)
4380 int secondary_out_reload = rld[i].secondary_out_reload;
4382 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4384 /* If there's a tertiary reload we have to change it also. */
4385 if (secondary_out_reload
4386 && rld[secondary_out_reload].secondary_out_reload != -1)
4387 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4388 = RELOAD_FOR_OPADDR_ADDR;
4391 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4392 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4393 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4394 else
4395 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4398 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4399 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4400 && operand_reloadnum[rld[i].opnum] >= 0
4401 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4402 == RELOAD_OTHER))
4403 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4405 if (goal_alternative_matches[rld[i].opnum] >= 0)
4406 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4409 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4410 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4411 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4413 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4414 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4415 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4416 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4417 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4418 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4419 This is complicated by the fact that a single operand can have more
4420 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4421 choose_reload_regs without affecting code quality, and cases that
4422 actually fail are extremely rare, so it turns out to be better to fix
4423 the problem here by not generating cases that choose_reload_regs will
4424 fail for. */
4425 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4426 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4427 a single operand.
4428 We can reduce the register pressure by exploiting that a
4429 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4430 does not conflict with any of them, if it is only used for the first of
4431 the RELOAD_FOR_X_ADDRESS reloads. */
4433 int first_op_addr_num = -2;
4434 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4435 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4436 int need_change = 0;
4437 /* We use first_op_addr_num and the contents of the above arrays
4438 first as flags - -2 means no instance encountered, -1 means exactly
4439 one instance encountered.
4440 If more than one instance has been encountered, we store the reload
4441 number of the first reload of the kind in question; reload numbers
4442 are known to be non-negative. */
4443 for (i = 0; i < noperands; i++)
4444 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4445 for (i = n_reloads - 1; i >= 0; i--)
4447 switch (rld[i].when_needed)
4449 case RELOAD_FOR_OPERAND_ADDRESS:
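/* FIRST_OP_ADDR_NUM starts at -2, so the first instance only
   raises it to -1; from the second instance on the test succeeds
   and the reload number is recorded.  Since I counts downwards,
   the value left behind is that of the first such reload.  */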
4450 if (++first_op_addr_num >= 0)
4452 first_op_addr_num = i;
4453 need_change = 1;
4455 break;
4456 case RELOAD_FOR_INPUT_ADDRESS:
4457 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4459 first_inpaddr_num[rld[i].opnum] = i;
4460 need_change = 1;
4462 break;
4463 case RELOAD_FOR_OUTPUT_ADDRESS:
4464 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4466 first_outpaddr_num[rld[i].opnum] = i;
4467 need_change = 1;
4469 break;
4470 default:
4471 break;
4475 if (need_change)
4477 for (i = 0; i < n_reloads; i++)
4479 int first_num;
4480 enum reload_type type;
4482 switch (rld[i].when_needed)
4484 case RELOAD_FOR_OPADDR_ADDR:
4485 first_num = first_op_addr_num;
4486 type = RELOAD_FOR_OPERAND_ADDRESS;
4487 break;
4488 case RELOAD_FOR_INPADDR_ADDRESS:
4489 first_num = first_inpaddr_num[rld[i].opnum];
4490 type = RELOAD_FOR_INPUT_ADDRESS;
4491 break;
4492 case RELOAD_FOR_OUTADDR_ADDRESS:
4493 first_num = first_outpaddr_num[rld[i].opnum];
4494 type = RELOAD_FOR_OUTPUT_ADDRESS;
4495 break;
4496 default:
4497 continue;
4499 if (first_num < 0)
4500 continue;
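/* This address-of-address reload does not precede the first TYPE
   reload, so the exemption described above cannot apply; convert
   it to TYPE.  */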
4501 else if (i > first_num)
4502 rld[i].when_needed = type;
4503 else
4505 /* Check if the only TYPE reload that uses reload I is
4506 reload FIRST_NUM. */
4507 for (j = n_reloads - 1; j > first_num; j--)
4509 if (rld[j].when_needed == type
4510 && (rld[i].secondary_p
4511 ? rld[j].secondary_in_reload == i
4512 : reg_mentioned_p (rld[i].in, rld[j].in)))
4514 rld[i].when_needed = type;
4515 break;
4523 /* See if we have any reloads that are now allowed to be merged
4524 because we've changed when the reload is needed to
4525 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4526 check for the most common cases. */
4528 for (i = 0; i < n_reloads; i++)
4529 if (rld[i].in != 0 && rld[i].out == 0
4530 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4531 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4532 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4533 for (j = 0; j < n_reloads; j++)
4534 if (i != j && rld[j].in != 0 && rld[j].out == 0
4535 && rld[j].when_needed == rld[i].when_needed
4536 && MATCHES (rld[i].in, rld[j].in)
4537 && rld[i].rclass == rld[j].rclass
4538 && !rld[i].nocombine && !rld[j].nocombine
4539 && rld[i].reg_rtx == rld[j].reg_rtx)
4541 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4542 transfer_replacements (i, j);
4543 rld[j].in = 0;
4546 #ifdef HAVE_cc0
4547 /* If we made any reloads for addresses, see if they violate a
4548 "no input reloads" requirement for this insn. But loads that we
4549 do after the insn (such as for output addresses) are fine. */
4550 if (no_input_reloads)
4551 for (i = 0; i < n_reloads; i++)
4552 gcc_assert (rld[i].in == 0
4553 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4554 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4555 #endif
4557 /* Compute reload_mode and reload_nregs. */
4558 for (i = 0; i < n_reloads; i++)
4560 rld[i].mode
4561 = (rld[i].inmode == VOIDmode
4562 || (GET_MODE_SIZE (rld[i].outmode)
4563 > GET_MODE_SIZE (rld[i].inmode)))
4564 ? rld[i].outmode : rld[i].inmode;
4566 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4569 /* Special case a simple move with an input reload and a
4570 destination of a hard reg: if the hard reg is ok, use it. */
4571 for (i = 0; i < n_reloads; i++)
4572 if (rld[i].when_needed == RELOAD_FOR_INPUT
4573 && GET_CODE (PATTERN (insn)) == SET
4574 && REG_P (SET_DEST (PATTERN (insn)))
4575 && (SET_SRC (PATTERN (insn)) == rld[i].in
4576 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4577 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4579 rtx dest = SET_DEST (PATTERN (insn));
4580 unsigned int regno = REGNO (dest);
4582 if (regno < FIRST_PSEUDO_REGISTER
4583 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4584 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4586 int nr = hard_regno_nregs[regno][rld[i].mode];
4587 int ok = 1, nri;
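/* For a multi-register value, every hard register it would occupy
   must lie in the reload's class before DEST can serve as the
   reload register.  */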
4589 for (nri = 1; nri < nr; nri ++)
4590 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4591 ok = 0;
4593 if (ok)
4594 rld[i].reg_rtx = dest;
4598 return retval;
4601 /* Return true if alternative number ALTNUM in constraint-string
4602 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4603 MEM gives the reference if it didn't need any reloads, otherwise it
4604 is null. */
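/* For example, given the constraint string "r,o" and ALTNUM 1, the
   'o' in the second alternative guarantees acceptance of the
   reloaded reference, so the result is true.  */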
4606 static bool
4607 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4608 const char *constraint, int altnum)
4610 int c;
4612 /* Skip alternatives before the one requested. */
4613 while (altnum > 0)
4615 while (*constraint++ != ',')
4616 ;
4617 altnum--;
4619 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4620 If one of them is present, this alternative accepts the result of
4621 passing a constant-pool reference through find_reloads_toplev.
4623 The same is true of extra memory constraints if the address
4624 was reloaded into a register. However, the target may elect
4625 to disallow the original constant address, forcing it to be
4626 reloaded into a register instead. */
4627 for (; (c = *constraint) && c != ',' && c != '#';
4628 constraint += CONSTRAINT_LEN (c, constraint))
4630 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4631 return true;
4632 #ifdef EXTRA_CONSTRAINT_STR
4633 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4634 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4635 return true;
4636 #endif
4638 return false;
4641 /* Scan X for memory references and scan the addresses for reloading.
4642 Also checks for references to "constant" regs that we want to eliminate
4643 and replaces them with the values they stand for.
4644 We may alter X destructively if it contains a reference to such.
4645 If X is just a constant reg, we return the equivalent value
4646 instead of X.
4648 IND_LEVELS says how many levels of indirect addressing this machine
4649 supports.
4651 OPNUM and TYPE identify the purpose of the reload.
4653 IS_SET_DEST is true if X is the destination of a SET, which is not
4654 appropriate to be replaced by a constant.
4656 INSN, if nonzero, is the insn in which we do the reload. It is used
4657 to determine if we may generate output reloads, and where to put USEs
4658 for pseudos that we have to replace with stack slots.
4660 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4661 result of find_reloads_address. */
4663 static rtx
4664 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4665 int ind_levels, int is_set_dest, rtx insn,
4666 int *address_reloaded)
4668 RTX_CODE code = GET_CODE (x);
4670 const char *fmt = GET_RTX_FORMAT (code);
4671 int i;
4672 int copied;
4674 if (code == REG)
4676 /* This code is duplicated for speed in find_reloads. */
4677 int regno = REGNO (x);
4678 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4679 x = reg_equiv_constant (regno);
4680 #if 0
4681 /* This creates (subreg (mem...)) which would cause an unnecessary
4682 reload of the mem. */
4683 else if (reg_equiv_mem (regno) != 0)
4684 x = reg_equiv_mem (regno);
4685 #endif
4686 else if (reg_equiv_memory_loc (regno)
4687 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4689 rtx mem = make_memloc (x, regno);
4690 if (reg_equiv_address (regno)
4691 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4693 /* If this is not a toplevel operand, find_reloads doesn't see
4694 this substitution. We have to emit a USE of the pseudo so
4695 that delete_output_reload can see it. */
4696 if (replace_reloads && recog_data.operand[opnum] != x)
4697 /* We mark the USE with QImode so that we recognize it
4698 as one that can be safely deleted at the end of
4699 reload. */
4700 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4701 QImode);
4702 x = mem;
4703 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4704 opnum, type, ind_levels, insn);
4705 if (!rtx_equal_p (x, mem))
4706 push_reg_equiv_alt_mem (regno, x);
4707 if (address_reloaded)
4708 *address_reloaded = i;
4711 return x;
4713 if (code == MEM)
4715 rtx tem = x;
4717 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4718 opnum, type, ind_levels, insn);
4719 if (address_reloaded)
4720 *address_reloaded = i;
4722 return tem;
4725 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4727 /* Check for SUBREG containing a REG that's equivalent to a
4728 constant. If the constant has a known value, truncate it
4729 right now. Similarly if we are extracting a single-word of a
4730 multi-word constant. If the constant is symbolic, allow it
4731 to be substituted normally. push_reload will strip the
4732 subreg later. The constant must not be VOIDmode, because we
4733 will lose the mode of the register (this should never happen
4734 because one of the cases above should handle it). */
4736 int regno = REGNO (SUBREG_REG (x));
4737 rtx tem;
4739 if (regno >= FIRST_PSEUDO_REGISTER
4740 && reg_renumber[regno] < 0
4741 && reg_equiv_constant (regno) != 0)
4743 tem =
4744 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4745 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4746 gcc_assert (tem);
4747 if (CONSTANT_P (tem)
4748 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4750 tem = force_const_mem (GET_MODE (x), tem);
4751 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4752 &XEXP (tem, 0), opnum, type,
4753 ind_levels, insn);
4754 if (address_reloaded)
4755 *address_reloaded = i;
4757 return tem;
4760 /* If the subreg contains a reg that will be converted to a mem,
4761 convert the subreg to a narrower memref now.
4762 Otherwise, we would get (subreg (mem ...) ...),
4763 which would force reload of the mem.
4765 We also need to do this if there is an equivalent MEM that is
4766 not offsettable. In that case, alter_subreg would produce an
4767 invalid address on big-endian machines.
4769 For machines that extend byte loads, we must not reload using
4770 a wider mode if we have a paradoxical SUBREG. find_reloads will
4771 force a reload in that case. So we should not do anything here. */
4773 if (regno >= FIRST_PSEUDO_REGISTER
4774 #ifdef LOAD_EXTEND_OP
4775 && !paradoxical_subreg_p (x)
4776 #endif
4777 && (reg_equiv_address (regno) != 0
4778 || (reg_equiv_mem (regno) != 0
4779 && (! strict_memory_address_addr_space_p
4780 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4781 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4782 || ! offsettable_memref_p (reg_equiv_mem (regno))
4783 || num_not_at_initial_offset))))
4784 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4785 insn, address_reloaded);
4788 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4790 if (fmt[i] == 'e')
4792 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4793 ind_levels, is_set_dest, insn,
4794 address_reloaded);
4795 /* If we have replaced a reg with its equivalent memory loc -
4796 that can still be handled here e.g. if it is in a paradoxical
4797 subreg - we must make the change in a copy, rather than using
4798 a destructive change. This way, find_reloads can still elect
4799 not to do the change. */
4800 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4802 x = shallow_copy_rtx (x);
4803 copied = 1;
4805 XEXP (x, i) = new_part;
4808 return x;
4811 /* Return a mem ref for the memory equivalent of reg REGNO.
4812 This mem ref is not shared with anything. */
4814 static rtx
4815 make_memloc (rtx ad, int regno)
4817 /* We must rerun eliminate_regs, in case the elimination
4818 offsets have changed. */
4819 rtx tem
4820 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4821 0);
4823 /* If TEM might contain a pseudo, we must copy it to avoid
4824 modifying it when we do the substitution for the reload. */
4825 if (rtx_varies_p (tem, 0))
4826 tem = copy_rtx (tem);
4828 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4829 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4831 /* Copy the result if it's still the same as the equivalence, to avoid
4832 modifying it when we do the substitution for the reload. */
4833 if (tem == reg_equiv_memory_loc (regno))
4834 tem = copy_rtx (tem);
4835 return tem;
4838 /* Returns true if AD could be turned into a valid memory reference
4839 to mode MODE in address space AS by reloading the part pointed to
4840 by PART into a register. */
4842 static int
4843 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4844 addr_space_t as, rtx *part)
4846 int retv;
4847 rtx tem = *part;
4848 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
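/* Temporarily replace the part in question with a register rtx
   numbered past every existing pseudo, ask whether the whole
   address would then be valid, and restore the original.  */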
4850 *part = reg;
4851 retv = memory_address_addr_space_p (mode, ad, as);
4852 *part = tem;
4854 return retv;
4857 /* Record all reloads needed for handling memory address AD
4858 which appears in *LOC in a memory reference to mode MODE
4859 which itself is found in location *MEMREFLOC.
4860 Note that we take shortcuts assuming that no multi-reg machine mode
4861 occurs as part of an address.
4863 OPNUM and TYPE specify the purpose of this reload.
4865 IND_LEVELS says how many levels of indirect addressing this machine
4866 supports.
4868 INSN, if nonzero, is the insn in which we do the reload. It is used
4869 to determine if we may generate output reloads, and where to put USEs
4870 for pseudos that we have to replace with stack slots.
4872 Value is one if this address is reloaded or replaced as a whole; it is
4873 zero if the top level of this address was not reloaded or replaced, and
4874 it is -1 if it may or may not have been reloaded or replaced.
4876 Note that there is no verification that the address will be valid after
4877 this routine does its work. Instead, we rely on the fact that the address
4878 was valid when reload started. So we need only undo things that reload
4879 could have broken. These are wrong register types, pseudos not allocated
4880 to a hard register, and frame pointer elimination. */
4882 static int
4883 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4884 rtx *loc, int opnum, enum reload_type type,
4885 int ind_levels, rtx insn)
4887 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4888 : ADDR_SPACE_GENERIC;
4889 int regno;
4890 int removed_and = 0;
4891 int op_index;
4892 rtx tem;
4894 /* If the address is a register, see if it is a legitimate address and
4895 reload if not. We first handle the cases where we need not reload
4896 or where we must reload in a non-standard way. */
4898 if (REG_P (ad))
4900 regno = REGNO (ad);
4902 if (reg_equiv_constant (regno) != 0)
4904 find_reloads_address_part (reg_equiv_constant (regno), loc,
4905 base_reg_class (mode, as, MEM, SCRATCH),
4906 GET_MODE (ad), opnum, type, ind_levels);
4907 return 1;
4910 tem = reg_equiv_memory_loc (regno);
4911 if (tem != 0)
4913 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4915 tem = make_memloc (ad, regno);
4916 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4917 XEXP (tem, 0),
4918 MEM_ADDR_SPACE (tem)))
4920 rtx orig = tem;
4922 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4923 &XEXP (tem, 0), opnum,
4924 ADDR_TYPE (type), ind_levels, insn);
4925 if (!rtx_equal_p (tem, orig))
4926 push_reg_equiv_alt_mem (regno, tem);
4928 /* We can avoid a reload if the register's equivalent memory
4929 expression is valid as an indirect memory address.
4930 But not all addresses are valid in a mem used as an indirect
4931 address: only reg or reg+constant. */
4933 if (ind_levels > 0
4934 && strict_memory_address_addr_space_p (mode, tem, as)
4935 && (REG_P (XEXP (tem, 0))
4936 || (GET_CODE (XEXP (tem, 0)) == PLUS
4937 && REG_P (XEXP (XEXP (tem, 0), 0))
4938 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4940 /* TEM is not the same as what we'll be replacing the
4941 pseudo with after reload, put a USE in front of INSN
4942 in the final reload pass. */
4943 if (replace_reloads
4944 && num_not_at_initial_offset
4945 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4947 *loc = tem;
4948 /* We mark the USE with QImode so that we
4949 recognize it as one that can be safely
4950 deleted at the end of reload. */
4951 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4952 insn), QImode);
4954 /* This doesn't really count as replacing the address
4955 as a whole, since it is still a memory access. */
4957 return 0;
4959 ad = tem;
4963 /* The only remaining case where we can avoid a reload is if this is a
4964 hard register that is valid as a base register and which is not the
4965 subject of a CLOBBER in this insn. */
4967 else if (regno < FIRST_PSEUDO_REGISTER
4968 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4969 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4970 return 0;
4972 /* If we do not have one of the cases above, we must do the reload. */
4973 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4974 base_reg_class (mode, as, MEM, SCRATCH),
4975 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4976 return 1;
4979 if (strict_memory_address_addr_space_p (mode, ad, as))
4981 /* The address appears valid, so reloads are not needed.
4982 But the address may contain an eliminable register.
4983 This can happen because a machine with indirect addressing
4984 may consider a pseudo register by itself a valid address even when
4985 it has failed to get a hard reg.
4986 So do a tree-walk to find and eliminate all such regs. */
4988 /* But first quickly dispose of a common case. */
4989 if (GET_CODE (ad) == PLUS
4990 && CONST_INT_P (XEXP (ad, 1))
4991 && REG_P (XEXP (ad, 0))
4992 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4993 return 0;
4995 subst_reg_equivs_changed = 0;
4996 *loc = subst_reg_equivs (ad, insn);
4998 if (! subst_reg_equivs_changed)
4999 return 0;
5001 /* Check result for validity after substitution. */
5002 if (strict_memory_address_addr_space_p (mode, ad, as))
5003 return 0;
5006 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5009 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5011 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5012 ind_levels, win);
5014 break;
5015 win:
5016 *memrefloc = copy_rtx (*memrefloc);
5017 XEXP (*memrefloc, 0) = ad;
5018 move_replacements (&ad, &XEXP (*memrefloc, 0));
5019 return -1;
5021 while (0);
5022 #endif
5024 /* The address is not valid. We have to figure out why. First see if
5025 we have an outer AND and remove it if so. Then analyze what's inside. */
5027 if (GET_CODE (ad) == AND)
5029 removed_and = 1;
5030 loc = &XEXP (ad, 0);
5031 ad = *loc;
5034 /* One possibility for why the address is invalid is that it is itself
5035 a MEM. This can happen when the frame pointer is being eliminated, a
5036 pseudo is not allocated to a hard register, and the offset between the
5037 frame and stack pointers is not its initial value. In that case the
5038 pseudo will have been replaced by a MEM referring to the
5039 stack pointer. */
5040 if (MEM_P (ad))
5042 /* First ensure that the address in this MEM is valid. Then, unless
5043 indirect addresses are valid, reload the MEM into a register. */
5044 tem = ad;
5045 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5046 opnum, ADDR_TYPE (type),
5047 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5049 /* If tem was changed, then we must create a new memory reference to
5050 hold it and store it back into memrefloc. */
5051 if (tem != ad && memrefloc)
5053 *memrefloc = copy_rtx (*memrefloc);
5054 copy_replacements (tem, XEXP (*memrefloc, 0));
5055 loc = &XEXP (*memrefloc, 0);
5056 if (removed_and)
5057 loc = &XEXP (*loc, 0);
5060 /* Check similar cases as for indirect addresses as above except
5061 that we can allow pseudos and a MEM since they should have been
5062 taken care of above. */
5064 if (ind_levels == 0
5065 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5066 || MEM_P (XEXP (tem, 0))
5067 || ! (REG_P (XEXP (tem, 0))
5068 || (GET_CODE (XEXP (tem, 0)) == PLUS
5069 && REG_P (XEXP (XEXP (tem, 0), 0))
5070 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5072 /* Must use TEM here, not AD, since it is the one that will
5073 have any subexpressions reloaded, if needed. */
5074 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5075 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5076 VOIDmode, 0,
5077 0, opnum, type);
5078 return ! removed_and;
5080 else
5081 return 0;
5084 /* If we have address of a stack slot but it's not valid because the
5085 displacement is too large, compute the sum in a register.
5086 Handle all base registers here, not just fp/ap/sp, because on some
5087 targets (namely SH) we can also get too large displacements from
5088 big-endian corrections. */
5089 else if (GET_CODE (ad) == PLUS
5090 && REG_P (XEXP (ad, 0))
5091 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5092 && CONST_INT_P (XEXP (ad, 1))
5093 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5094 CONST_INT)
5095 /* Similarly, if we were to reload the base register and the
5096 mem+offset address is still invalid, then we want to reload
5097 the whole address, not just the base register. */
5098 || ! maybe_memory_address_addr_space_p
5099 (mode, ad, as, &(XEXP (ad, 0)))))
5102 /* Unshare the MEM rtx so we can safely alter it. */
5103 if (memrefloc)
5105 *memrefloc = copy_rtx (*memrefloc);
5106 loc = &XEXP (*memrefloc, 0);
5107 if (removed_and)
5108 loc = &XEXP (*loc, 0);
5111 if (double_reg_address_ok
5112 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5113 PLUS, CONST_INT))
5115 /* Unshare the sum as well. */
5116 *loc = ad = copy_rtx (ad);
5118 /* Reload the displacement into an index reg.
5119 We assume the frame pointer or arg pointer is a base reg. */
5120 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5121 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5122 type, ind_levels);
5123 return 0;
5125 else
5127 /* If the sum of two regs is not necessarily valid,
5128 reload the sum into a base reg.
5129 That will at least work. */
5130 find_reloads_address_part (ad, loc,
5131 base_reg_class (mode, as, MEM, SCRATCH),
5132 GET_MODE (ad), opnum, type, ind_levels);
5134 return ! removed_and;
5137 /* If we have an indexed stack slot, there are three possible reasons why
5138 it might be invalid: The index might need to be reloaded, the address
5139 might have been made by frame pointer elimination and hence have a
5140 constant out of range, or both reasons might apply.
5142 We can easily check for an index needing reload, but even if that is the
5143 case, we might also have an invalid constant. To avoid making the
5144 conservative assumption and requiring two reloads, we see if this address
5145 is valid when not interpreted strictly. If it is, the only problem is
5146 that the index needs a reload and find_reloads_address_1 will take care
5147 of it.
5149 Handle all base registers here, not just fp/ap/sp, because on some
5150 targets (namely SPARC) we can also get invalid addresses from preventive
5151 subreg big-endian corrections made by find_reloads_toplev. We
5152 can also get expressions involving LO_SUM (rather than PLUS) from
5153 find_reloads_subreg_address.
5155 If we decide to do something, it must be that `double_reg_address_ok'
5156 is true. We generate a reload of the base register + constant and
5157 rework the sum so that the reload register will be added to the index.
5158 This is safe because we know the address isn't shared.
5160 We check for the base register as both the first and second operand of
5161 the innermost PLUS and/or LO_SUM. */
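/* A schematic example of the rework described above (offsets invented):
   AD == (plus (plus (reg fp) (reg index)) (const_int 400)), where the
   displacement is out of range.  The constant is folded into the base,
   giving (plus (plus (reg fp) (const_int 400)) (reg index)); the inner
   (plus (reg fp) (const_int 400)) is then reloaded into a base register,
   leaving a plain base + index address.  */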
5163 for (op_index = 0; op_index < 2; ++op_index)
5165 rtx operand, addend;
5166 enum rtx_code inner_code;
5168 if (GET_CODE (ad) != PLUS)
5169 continue;
5171 inner_code = GET_CODE (XEXP (ad, 0));
5172 if (!(GET_CODE (ad) == PLUS
5173 && CONST_INT_P (XEXP (ad, 1))
5174 && (inner_code == PLUS || inner_code == LO_SUM)))
5175 continue;
5177 operand = XEXP (XEXP (ad, 0), op_index);
5178 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5179 continue;
5181 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5183 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5184 GET_CODE (addend))
5185 || operand == frame_pointer_rtx
5186 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5187 || operand == hard_frame_pointer_rtx
5188 #endif
5189 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5190 || operand == arg_pointer_rtx
5191 #endif
5192 || operand == stack_pointer_rtx)
5193 && ! maybe_memory_address_addr_space_p
5194 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5196 rtx offset_reg;
5197 enum reg_class cls;
5199 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5201 /* Form the adjusted address. */
5202 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5203 ad = gen_rtx_PLUS (GET_MODE (ad),
5204 op_index == 0 ? offset_reg : addend,
5205 op_index == 0 ? addend : offset_reg);
5206 else
5207 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5208 op_index == 0 ? offset_reg : addend,
5209 op_index == 0 ? addend : offset_reg);
5210 *loc = ad;
5212 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5213 find_reloads_address_part (XEXP (ad, op_index),
5214 &XEXP (ad, op_index), cls,
5215 GET_MODE (ad), opnum, type, ind_levels);
5216 find_reloads_address_1 (mode, as,
5217 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5218 GET_CODE (XEXP (ad, op_index)),
5219 &XEXP (ad, 1 - op_index), opnum,
5220 type, 0, insn);
5222 return 0;
5226 /* See if address becomes valid when an eliminable register
5227 in a sum is replaced. */
5229 tem = ad;
5230 if (GET_CODE (ad) == PLUS)
5231 tem = subst_indexed_address (ad);
5232 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5234 /* Ok, we win that way. Replace any additional eliminable
5235 registers. */
5237 subst_reg_equivs_changed = 0;
5238 tem = subst_reg_equivs (tem, insn);
5240 /* Make sure that didn't make the address invalid again. */
5242 if (! subst_reg_equivs_changed
5243 || strict_memory_address_addr_space_p (mode, tem, as))
5245 *loc = tem;
5246 return 0;
5250 /* If constants aren't valid addresses, reload the constant address
5251 into a register. */
5252 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5254 enum machine_mode address_mode = GET_MODE (ad);
5255 if (address_mode == VOIDmode)
5256 address_mode = targetm.addr_space.address_mode (as);
5258 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5259 Unshare it so we can safely alter it. */
5260 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5261 && CONSTANT_POOL_ADDRESS_P (ad))
5263 *memrefloc = copy_rtx (*memrefloc);
5264 loc = &XEXP (*memrefloc, 0);
5265 if (removed_and)
5266 loc = &XEXP (*loc, 0);
5269 find_reloads_address_part (ad, loc,
5270 base_reg_class (mode, as, MEM, SCRATCH),
5271 address_mode, opnum, type, ind_levels);
5272 return ! removed_and;
5275 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5276 opnum, type, ind_levels, insn);
5279 /* Find all pseudo regs appearing in AD
5280 that are eliminable in favor of equivalent values
5281 and do not have hard regs; replace them by their equivalents.
5282 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5283 front of it for pseudos that we have to replace with stack slots. */
5285 static rtx
5286 subst_reg_equivs (rtx ad, rtx insn)
5288 RTX_CODE code = GET_CODE (ad);
5289 int i;
5290 const char *fmt;
5292 switch (code)
5294 case HIGH:
5295 case CONST_INT:
5296 case CONST:
5297 case CONST_DOUBLE:
5298 case CONST_FIXED:
5299 case CONST_VECTOR:
5300 case SYMBOL_REF:
5301 case LABEL_REF:
5302 case PC:
5303 case CC0:
5304 return ad;
5306 case REG:
5308 int regno = REGNO (ad);
5310 if (reg_equiv_constant (regno) != 0)
5312 subst_reg_equivs_changed = 1;
5313 return reg_equiv_constant (regno);
5315 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5317 rtx mem = make_memloc (ad, regno);
5318 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5320 subst_reg_equivs_changed = 1;
5321 /* We mark the USE with QImode so that we recognize it
5322 as one that can be safely deleted at the end of
5323 reload. */
5324 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5325 QImode);
5326 return mem;
5330 return ad;
5332 case PLUS:
5333 /* Quickly dispose of a common case. */
5334 if (XEXP (ad, 0) == frame_pointer_rtx
5335 && CONST_INT_P (XEXP (ad, 1)))
5336 return ad;
5337 break;
5339 default:
5340 break;
5343 fmt = GET_RTX_FORMAT (code);
5344 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5345 if (fmt[i] == 'e')
5346 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5347 return ad;
5350 /* Compute the sum of X and Y, making canonicalizations assumed in an
5351 address, namely: sum constant integers, surround the sum of two
5352 constants with a CONST, put the constant as the second operand, and
5353 group the constant on the outermost sum.
5355 This routine assumes both inputs are already in canonical form. */
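/* Examples of the canonicalization (values invented):
   form_sum (Pmode, (plus (reg 65) (const_int 4)), (const_int 8))
   yields (plus (reg 65) (const_int 12)), while summing
   (symbol_ref "x") and (const_int 4) yields
   (const (plus (symbol_ref "x") (const_int 4))).  */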
5357 static rtx
5358 form_sum (enum machine_mode mode, rtx x, rtx y)
5360 rtx tem;
5362 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5363 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5365 if (CONST_INT_P (x))
5366 return plus_constant (y, INTVAL (x));
5367 else if (CONST_INT_P (y))
5368 return plus_constant (x, INTVAL (y));
5369 else if (CONSTANT_P (x))
5370 tem = x, x = y, y = tem;
5372 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5373 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5375 /* Note that if the operands of Y are specified in the opposite
5376 order in the recursive calls below, infinite recursion will occur. */
5377 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5378 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5380 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5381 constant will have been placed second. */
5382 if (CONSTANT_P (x) && CONSTANT_P (y))
5384 if (GET_CODE (x) == CONST)
5385 x = XEXP (x, 0);
5386 if (GET_CODE (y) == CONST)
5387 y = XEXP (y, 0);
5389 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5392 return gen_rtx_PLUS (mode, x, y);
5395 /* If ADDR is a sum containing a pseudo register that should be
5396 replaced with a constant (from reg_equiv_constant),
5397 return the result of doing so, and also apply the associative
5398 law so that the result is more likely to be a valid address.
5399 (But it is not guaranteed to be one.)
5401 Note that at most one register is replaced, even if more are
5402 replaceable. Also, we try to put the result into a canonical form
5403 so it is more likely to be a valid address.
5405 In all other cases, return ADDR. */
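/* Illustration (register numbers invented): given ADDR ==
   (plus (plus (reg 65) (reg 66)) (const_int 8)), where pseudo 66 got no
   hard register and is equivalent to (const_int 4), the pseudo is replaced
   and the constants are regrouped, yielding
   (plus (reg 65) (const_int 12)).  */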
5407 static rtx
5408 subst_indexed_address (rtx addr)
5410 rtx op0 = 0, op1 = 0, op2 = 0;
5411 rtx tem;
5412 int regno;
5414 if (GET_CODE (addr) == PLUS)
5416 /* Try to find a register to replace. */
5417 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5418 if (REG_P (op0)
5419 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5420 && reg_renumber[regno] < 0
5421 && reg_equiv_constant (regno) != 0)
5422 op0 = reg_equiv_constant (regno);
5423 else if (REG_P (op1)
5424 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5425 && reg_renumber[regno] < 0
5426 && reg_equiv_constant (regno) != 0)
5427 op1 = reg_equiv_constant (regno);
5428 else if (GET_CODE (op0) == PLUS
5429 && (tem = subst_indexed_address (op0)) != op0)
5430 op0 = tem;
5431 else if (GET_CODE (op1) == PLUS
5432 && (tem = subst_indexed_address (op1)) != op1)
5433 op1 = tem;
5434 else
5435 return addr;
5437 /* Pick out up to three things to add. */
5438 if (GET_CODE (op1) == PLUS)
5439 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5440 else if (GET_CODE (op0) == PLUS)
5441 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5443 /* Compute the sum. */
5444 if (op2 != 0)
5445 op1 = form_sum (GET_MODE (addr), op1, op2);
5446 if (op1 != 0)
5447 op0 = form_sum (GET_MODE (addr), op0, op1);
5449 return op0;
5451 return addr;
5454 /* Update the REG_INC notes for an insn. It updates all REG_INC
5455 notes for the instruction which refer to REGNO so that they refer
5456 to the reload number.
5458 INSN is the insn for which any REG_INC notes need updating.
5460 REGNO is the register number which has been reloaded.
5462 RELOADNUM is the reload number. */
5464 static void
5465 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5466 int reloadnum ATTRIBUTE_UNUSED)
5468 #ifdef AUTO_INC_DEC
5469 rtx link;
5471 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5472 if (REG_NOTE_KIND (link) == REG_INC
5473 && (int) REGNO (XEXP (link, 0)) == regno)
5474 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5475 #endif
5478 /* Record the pseudo registers we must reload into hard registers in a
5479 subexpression of a would-be memory address, X referring to a value
5480 in mode MODE. (This function is not called if the address we find
5481 is strictly valid.)
5483 CONTEXT = 1 means we are considering regs as index regs,
5484 = 0 means we are considering them as base regs.
5485 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5486 or an autoinc code.
5487 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5488 is the code of the index part of the address. Otherwise, pass SCRATCH
5489 for this argument.
5490 OPNUM and TYPE specify the purpose of any reloads made.
5492 IND_LEVELS says how many levels of indirect addressing are
5493 supported at this point in the address.
5495 INSN, if nonzero, is the insn in which we do the reload. It is used
5496 to determine if we may generate output reloads.
5498 We return nonzero if X, as a whole, is reloaded or replaced. */
5500 /* Note that we take shortcuts assuming that no multi-reg machine mode
5501 occurs as part of an address.
5502 Also, this is not fully machine-customizable; it works for machines
5503 such as VAXen and 68000's and 32000's, but other possible machines
5504 could have addressing modes that this does not handle right.
5505 If you add push_reload calls here, you need to make sure gen_reload
5506 handles those cases gracefully. */
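/* For example (schematic): in the address (plus (reg 65) (mult (reg 66)
   (const_int 4))), the MULT operand causes reg 66 to be checked as an
   index register (CONTEXT == 1), while reg 65 is checked as a base
   register (CONTEXT == 0) with INDEX_CODE == MULT.  */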
5508 static int
5509 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5510 rtx x, int context,
5511 enum rtx_code outer_code, enum rtx_code index_code,
5512 rtx *loc, int opnum, enum reload_type type,
5513 int ind_levels, rtx insn)
5515 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5516 ((CONTEXT) == 0 \
5517 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5518 : REGNO_OK_FOR_INDEX_P (REGNO))
5520 enum reg_class context_reg_class;
5521 RTX_CODE code = GET_CODE (x);
5523 if (context == 1)
5524 context_reg_class = INDEX_REG_CLASS;
5525 else
5526 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5528 switch (code)
5530 case PLUS:
5532 rtx orig_op0 = XEXP (x, 0);
5533 rtx orig_op1 = XEXP (x, 1);
5534 RTX_CODE code0 = GET_CODE (orig_op0);
5535 RTX_CODE code1 = GET_CODE (orig_op1);
5536 rtx op0 = orig_op0;
5537 rtx op1 = orig_op1;
5539 if (GET_CODE (op0) == SUBREG)
5541 op0 = SUBREG_REG (op0);
5542 code0 = GET_CODE (op0);
5543 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5544 op0 = gen_rtx_REG (word_mode,
5545 (REGNO (op0) +
5546 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5547 GET_MODE (SUBREG_REG (orig_op0)),
5548 SUBREG_BYTE (orig_op0),
5549 GET_MODE (orig_op0))));
5552 if (GET_CODE (op1) == SUBREG)
5554 op1 = SUBREG_REG (op1);
5555 code1 = GET_CODE (op1);
5556 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5557 /* ??? Why is this given op1's mode, while above for
5558 ??? op0 SUBREGs we use word_mode? */
5559 op1 = gen_rtx_REG (GET_MODE (op1),
5560 (REGNO (op1) +
5561 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5562 GET_MODE (SUBREG_REG (orig_op1)),
5563 SUBREG_BYTE (orig_op1),
5564 GET_MODE (orig_op1))));
5566 /* A PLUS in the index register may be created only as a result of
5567 register rematerialization for an expression like &localvar*4. Reload it.
5568 It may be possible to combine the displacement on the outer level,
5569 but it is probably not worthwhile to do so. */
5570 if (context == 1)
5572 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5573 opnum, ADDR_TYPE (type), ind_levels, insn);
5574 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5575 context_reg_class,
5576 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5577 return 1;
5580 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5581 || code0 == ZERO_EXTEND || code1 == MEM)
5583 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5584 &XEXP (x, 0), opnum, type, ind_levels,
5585 insn);
5586 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5587 &XEXP (x, 1), opnum, type, ind_levels,
5588 insn);
5591 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5592 || code1 == ZERO_EXTEND || code0 == MEM)
5594 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5595 &XEXP (x, 0), opnum, type, ind_levels,
5596 insn);
5597 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5598 &XEXP (x, 1), opnum, type, ind_levels,
5599 insn);
5602 else if (code0 == CONST_INT || code0 == CONST
5603 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5604 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5605 &XEXP (x, 1), opnum, type, ind_levels,
5606 insn);
5608 else if (code1 == CONST_INT || code1 == CONST
5609 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5610 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5611 &XEXP (x, 0), opnum, type, ind_levels,
5612 insn);
5614 else if (code0 == REG && code1 == REG)
5616 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5617 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5618 return 0;
5619 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5620 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5621 return 0;
5622 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5623 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5624 &XEXP (x, 1), opnum, type, ind_levels,
5625 insn);
5626 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5627 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5631 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5632 &XEXP (x, 0), opnum, type, ind_levels,
5633 insn);
5634 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5635 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5636 &XEXP (x, 1), opnum, type, ind_levels,
5637 insn);
5638 else
5640 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5641 &XEXP (x, 0), opnum, type, ind_levels,
5642 insn);
5643 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5644 &XEXP (x, 1), opnum, type, ind_levels,
5645 insn);
5649 else if (code0 == REG)
5651 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5652 &XEXP (x, 0), opnum, type, ind_levels,
5653 insn);
5654 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5655 &XEXP (x, 1), opnum, type, ind_levels,
5656 insn);
5659 else if (code1 == REG)
5661 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5662 &XEXP (x, 1), opnum, type, ind_levels,
5663 insn);
5664 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5665 &XEXP (x, 0), opnum, type, ind_levels,
5666 insn);
5670 return 0;
5672 case POST_MODIFY:
5673 case PRE_MODIFY:
5675 rtx op0 = XEXP (x, 0);
5676 rtx op1 = XEXP (x, 1);
5677 enum rtx_code index_code;
5678 int regno;
5679 int reloadnum;
5681 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5682 return 0;
5684 /* Currently, we only support {PRE,POST}_MODIFY constructs
5685 where a base register is {inc,dec}remented by the contents
5686 of another register or by a constant value. Thus, these
5687 operands must match. */
5688 gcc_assert (op0 == XEXP (op1, 0));
5690 /* Require index register (or constant). Let's just handle the
5691 register case in the meantime... If the target allows
5692 auto-modify by a constant then we could try replacing a pseudo
5693 register with its equivalent constant where applicable.
5695 We also handle the case where the register was eliminated
5696 resulting in a PLUS subexpression.
5698 If we later decide to reload the whole PRE_MODIFY or
5699 POST_MODIFY, inc_for_reload might clobber the reload register
5700 before reading the index. The index register might therefore
5701 need to live longer than a TYPE reload normally would, so be
5702 conservative and class it as RELOAD_OTHER. */
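/* Schematic example: for (pre_modify (reg 65) (plus (reg 65) (reg 66))),
   reg 66 is the index; if it is not already acceptable as an index
   register it is reloaded with type RELOAD_OTHER so that it remains live
   across the whole auto-modify sequence.  */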
5703 if ((REG_P (XEXP (op1, 1))
5704 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5705 || GET_CODE (XEXP (op1, 1)) == PLUS)
5706 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5707 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5708 ind_levels, insn);
5710 gcc_assert (REG_P (XEXP (op1, 0)));
5712 regno = REGNO (XEXP (op1, 0));
5713 index_code = GET_CODE (XEXP (op1, 1));
5715 /* A register that is incremented cannot be constant! */
5716 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5717 || reg_equiv_constant (regno) == 0);
5719 /* Handle a register that is equivalent to a memory location
5720 which cannot be addressed directly. */
5721 if (reg_equiv_memory_loc (regno) != 0
5722 && (reg_equiv_address (regno) != 0
5723 || num_not_at_initial_offset))
5725 rtx tem = make_memloc (XEXP (x, 0), regno);
5727 if (reg_equiv_address (regno)
5728 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5730 rtx orig = tem;
5732 /* First reload the memory location's address.
5733 We can't use ADDR_TYPE (type) here, because we need to
5734 write back the value after reading it, hence we actually
5735 need two registers. */
5736 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5737 &XEXP (tem, 0), opnum,
5738 RELOAD_OTHER,
5739 ind_levels, insn);
5741 if (!rtx_equal_p (tem, orig))
5742 push_reg_equiv_alt_mem (regno, tem);
5744 /* Then reload the memory location into a base
5745 register. */
5746 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5747 &XEXP (op1, 0),
5748 base_reg_class (mode, as,
5749 code, index_code),
5750 GET_MODE (x), GET_MODE (x), 0,
5751 0, opnum, RELOAD_OTHER);
5753 update_auto_inc_notes (this_insn, regno, reloadnum);
5754 return 0;
5758 if (reg_renumber[regno] >= 0)
5759 regno = reg_renumber[regno];
5761 /* We require a base register here... */
5762 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5764 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5765 &XEXP (op1, 0), &XEXP (x, 0),
5766 base_reg_class (mode, as,
5767 code, index_code),
5768 GET_MODE (x), GET_MODE (x), 0, 0,
5769 opnum, RELOAD_OTHER);
5771 update_auto_inc_notes (this_insn, regno, reloadnum);
5772 return 0;
5775 return 0;
5777 case POST_INC:
5778 case POST_DEC:
5779 case PRE_INC:
5780 case PRE_DEC:
5781 if (REG_P (XEXP (x, 0)))
5783 int regno = REGNO (XEXP (x, 0));
5784 int value = 0;
5785 rtx x_orig = x;
5787 /* A register that is incremented cannot be constant! */
5788 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5789 || reg_equiv_constant (regno) == 0);
5791 /* Handle a register that is equivalent to a memory location
5792 which cannot be addressed directly. */
5793 if (reg_equiv_memory_loc (regno) != 0
5794 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5796 rtx tem = make_memloc (XEXP (x, 0), regno);
5797 if (reg_equiv_address (regno)
5798 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5800 rtx orig = tem;
5802 /* First reload the memory location's address.
5803 We can't use ADDR_TYPE (type) here, because we need to
5804 write back the value after reading it, hence we actually
5805 need two registers. */
5806 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5807 &XEXP (tem, 0), opnum, type,
5808 ind_levels, insn);
5809 if (!rtx_equal_p (tem, orig))
5810 push_reg_equiv_alt_mem (regno, tem);
5811 /* Put this inside a new increment-expression. */
5812 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5813 /* Proceed to reload that, as if it contained a register. */
5817 /* If we have a hard register that is ok in this incdec context,
5818 don't make a reload. If the register isn't nice enough for
5819 autoincdec, we can reload it. But if an autoincrement of a
5820 register that we have just verified here as acceptable still
5821 isn't "valid" in the outer context, then no autoincrement is "valid".
5822 If that is true and something made an autoincrement anyway,
5823 this must be a special context where one is allowed.
5824 (For example, a "push" instruction.)
5825 We can't improve this address, so leave it alone. */
5827 /* Otherwise, reload the autoincrement into a suitable hard reg
5828 and record how much to increment by. */
5830 if (reg_renumber[regno] >= 0)
5831 regno = reg_renumber[regno];
5832 if (regno >= FIRST_PSEUDO_REGISTER
5833 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5834 index_code))
5836 int reloadnum;
5838 /* If we can output the register afterwards, do so, this
5839 saves the extra update.
5840 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5841 CALL_INSN - and it does not set CC0.
5842 But don't do this if we cannot directly address the
5843 memory location, since this will make it harder to
5844 reuse address reloads, and increases register pressure.
5845 Also don't do this if we can probably update x directly. */
5846 rtx equiv = (MEM_P (XEXP (x, 0))
5847 ? XEXP (x, 0)
5848 : reg_equiv_mem (regno));
5849 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5850 if (insn && NONJUMP_INSN_P (insn) && equiv
5851 && memory_operand (equiv, GET_MODE (equiv))
5852 #ifdef HAVE_cc0
5853 && ! sets_cc0_p (PATTERN (insn))
5854 #endif
5855 && ! (icode != CODE_FOR_nothing
5856 && insn_operand_matches (icode, 0, equiv)
5857 && insn_operand_matches (icode, 1, equiv)))
5859 /* We use the original pseudo for loc, so that
5860 emit_reload_insns() knows which pseudo this
5861 reload refers to and updates the pseudo rtx, not
5862 its equivalent memory location, as well as the
5863 corresponding entry in reg_last_reload_reg. */
5864 loc = &XEXP (x_orig, 0);
5865 x = XEXP (x, 0);
5866 reloadnum
5867 = push_reload (x, x, loc, loc,
5868 context_reg_class,
5869 GET_MODE (x), GET_MODE (x), 0, 0,
5870 opnum, RELOAD_OTHER);
5872 else
5874 reloadnum
5875 = push_reload (x, x, loc, (rtx*) 0,
5876 context_reg_class,
5877 GET_MODE (x), GET_MODE (x), 0, 0,
5878 opnum, type);
5879 rld[reloadnum].inc
5880 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5882 value = 1;
5885 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5886 reloadnum);
5888 return value;
5890 return 0;
5892 case TRUNCATE:
5893 case SIGN_EXTEND:
5894 case ZERO_EXTEND:
5895 /* Look for parts to reload in the inner expression and reload them
5896 too, in addition to this operation. Reloading all inner parts in
5897 addition to this one shouldn't be necessary, but at this point,
5898 we don't know if we can possibly omit any part that *can* be
5899 reloaded. Targets that are better off reloading just either part
5900 (or perhaps even a different part of an outer expression), should
5901 define LEGITIMIZE_RELOAD_ADDRESS. */
5902 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5903 context, code, SCRATCH, &XEXP (x, 0), opnum,
5904 type, ind_levels, insn);
5905 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5906 context_reg_class,
5907 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5908 return 1;
5910 case MEM:
5911 /* This is probably the result of a substitution, by eliminate_regs, of
5912 an equivalent address for a pseudo that was not allocated to a hard
5913 register. Verify that the specified address is valid and reload it
5914 into a register.
5916 Since we know we are going to reload this item, don't decrement for
5917 the indirection level.
5919 Note that this is actually conservative: it would be slightly more
5920 efficient to use the value of SPILL_INDIRECT_LEVELS from
5921 reload1.c here. */
5923 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5924 opnum, ADDR_TYPE (type), ind_levels, insn);
5925 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5926 context_reg_class,
5927 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5928 return 1;
5930 case REG:
5932 int regno = REGNO (x);
5934 if (reg_equiv_constant (regno) != 0)
5936 find_reloads_address_part (reg_equiv_constant (regno), loc,
5937 context_reg_class,
5938 GET_MODE (x), opnum, type, ind_levels);
5939 return 1;
5942 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5943 that feeds this insn. */
5944 if (reg_equiv_mem (regno) != 0)
5946 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5947 context_reg_class,
5948 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5949 return 1;
5951 #endif
5953 if (reg_equiv_memory_loc (regno)
5954 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5956 rtx tem = make_memloc (x, regno);
5957 if (reg_equiv_address (regno) != 0
5958 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5960 x = tem;
5961 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5962 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5963 ind_levels, insn);
5964 if (!rtx_equal_p (x, tem))
5965 push_reg_equiv_alt_mem (regno, x);
5969 if (reg_renumber[regno] >= 0)
5970 regno = reg_renumber[regno];
5972 if (regno >= FIRST_PSEUDO_REGISTER
5973 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5974 index_code))
5976 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5977 context_reg_class,
5978 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5979 return 1;
5982 /* If a register appearing in an address is the subject of a CLOBBER
5983 in this insn, reload it into some other register to be safe.
5984 The CLOBBER is supposed to make the register unavailable
5985 from before this insn to after it. */
5986 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5988 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5989 context_reg_class,
5990 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5991 return 1;
5994 return 0;
5996 case SUBREG:
5997 if (REG_P (SUBREG_REG (x)))
5999 /* If this is a SUBREG of a hard register and the resulting register
6000 is of the wrong class, reload the whole SUBREG. This avoids
6001 needless copies if SUBREG_REG is multi-word. */
6002 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6004 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6006 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6007 index_code))
6009 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6010 context_reg_class,
6011 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6012 return 1;
6015 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6016 is larger than the class size, then reload the whole SUBREG. */
6017 else
6019 enum reg_class rclass = context_reg_class;
6020 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6021 > reg_class_size[(int) rclass])
6023 x = find_reloads_subreg_address (x, 0, opnum,
6024 ADDR_TYPE (type),
6025 ind_levels, insn, NULL);
6026 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6027 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6028 return 1;
6032 break;
6034 default:
6035 break;
6039 const char *fmt = GET_RTX_FORMAT (code);
6040 int i;
6042 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6044 if (fmt[i] == 'e')
6045 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6046 we get here. */
6047 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6048 code, SCRATCH, &XEXP (x, i),
6049 opnum, type, ind_levels, insn);
6053 #undef REG_OK_FOR_CONTEXT
6054 return 0;
6057 /* X, which is found at *LOC, is a part of an address that needs to be
6058 reloaded into a register of class RCLASS. If X is a constant, or if
6059 X is a PLUS that contains a constant, check that the constant is a
6060 legitimate operand and that we are supposed to be able to load
6061 it into the register.
6063 If not, force the constant into memory and reload the MEM instead.
6065 MODE is the mode to use, in case X is an integer constant.
6067 OPNUM and TYPE describe the purpose of any reloads made.
6069 IND_LEVELS says how many levels of indirect addressing this machine
6070 supports. */
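/* For instance (hypothetical target behavior): if X is a CONST_DOUBLE
   that the target cannot load directly into RCLASS, force_const_mem
   places it in the constant pool, X becomes (mem (symbol_ref ...)),
   the pool address is processed like any other address, and the
   resulting MEM is reloaded into RCLASS.  */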
6072 static void
6073 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6074 enum machine_mode mode, int opnum,
6075 enum reload_type type, int ind_levels)
6077 if (CONSTANT_P (x)
6078 && (!targetm.legitimate_constant_p (mode, x)
6079 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6081 x = force_const_mem (mode, x);
6082 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6083 opnum, type, ind_levels, 0);
6086 else if (GET_CODE (x) == PLUS
6087 && CONSTANT_P (XEXP (x, 1))
6088 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6089 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6090 == NO_REGS))
6092 rtx tem;
6094 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6095 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6096 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6097 opnum, type, ind_levels, 0);
6100 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6101 mode, VOIDmode, 0, 0, opnum, type);
6104 /* X, a subreg of a pseudo, is a part of an address that needs to be
6105 reloaded.
6107 If the pseudo is equivalent to a memory location that cannot be directly
6108 addressed, make the necessary address reloads.
6110 If address reloads have been necessary, or if the address is changed
6111 by register elimination, return the rtx of the memory location;
6112 otherwise, return X.
6114 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6115 memory location.
6117 OPNUM and TYPE identify the purpose of the reload.
6119 IND_LEVELS says how many levels of indirect addressing are
6120 supported at this point in the address.
6122 INSN, if nonzero, is the insn in which we do the reload. It is used
6123 to determine where to put USEs for pseudos that we have to replace with
6124 stack slots. */
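/* Schematic example (offsets invented): X == (subreg:HI (reg:SI 117) 2),
   where pseudo 117 lives in the stack slot
   (mem:SI (plus (reg sp) (const_int 40))).  The subreg is rewritten as
   (mem:HI (plus (reg sp) (const_int 42))), i.e. the equivalent memory
   location adjusted by the SUBREG_BYTE offset, and any address reloads
   that the adjusted address needs are then generated.  */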
6126 static rtx
6127 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6128 enum reload_type type, int ind_levels, rtx insn,
6129 int *address_reloaded)
6131 int regno = REGNO (SUBREG_REG (x));
6132 int reloaded = 0;
6134 if (reg_equiv_memory_loc (regno))
6136 /* If the address is not directly addressable, or if the address is not
6137 offsettable, then it must be replaced. */
6138 if (! force_replace
6139 && (reg_equiv_address (regno)
6140 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6141 force_replace = 1;
6143 if (force_replace || num_not_at_initial_offset)
6145 rtx tem = make_memloc (SUBREG_REG (x), regno);
6147 /* If the address changes because of register elimination, then
6148 it must be replaced. */
6149 if (force_replace
6150 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6152 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6153 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6154 int offset;
6155 rtx orig = tem;
6157 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6158 hold the correct (negative) byte offset. */
6159 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6160 offset = inner_size - outer_size;
6161 else
6162 offset = SUBREG_BYTE (x);
6164 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6165 PUT_MODE (tem, GET_MODE (x));
6166 if (MEM_OFFSET_KNOWN_P (tem))
6167 set_mem_offset (tem, MEM_OFFSET (tem) + offset);
6168 if (MEM_SIZE_KNOWN_P (tem)
6169 && MEM_SIZE (tem) != (HOST_WIDE_INT) outer_size)
6170 set_mem_size (tem, outer_size);
6172 /* If this was a paradoxical subreg that we replaced, the
6173 resulting memory must be sufficiently aligned to allow
6174 us to widen the mode of the memory. */
6175 if (outer_size > inner_size)
6177 rtx base;
6179 base = XEXP (tem, 0);
6180 if (GET_CODE (base) == PLUS)
6182 if (CONST_INT_P (XEXP (base, 1))
6183 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6184 return x;
6185 base = XEXP (base, 0);
6187 if (!REG_P (base)
6188 || (REGNO_POINTER_ALIGN (REGNO (base))
6189 < outer_size * BITS_PER_UNIT))
6190 return x;
6193 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6194 XEXP (tem, 0), &XEXP (tem, 0),
6195 opnum, type, ind_levels, insn);
6196 /* ??? Do we need to handle nonzero offsets somehow? */
6197 if (!offset && !rtx_equal_p (tem, orig))
6198 push_reg_equiv_alt_mem (regno, tem);
6200 /* For some processors an address may be valid in the
6201 original mode but not in a smaller mode. For
6202 example, ARM accepts a scaled index register in
6203 SImode but not in HImode. Note that this is only
6204 a problem if the address in reg_equiv_mem is already
6205 invalid in the new mode; other cases would be fixed
6206 by find_reloads_address as usual.
6208 ??? We attempt to handle such cases here by doing an
6209 additional reload of the full address after the
6210 usual processing by find_reloads_address. Note that
6211 this may not work in the general case, but it seems
6212 to cover the cases where this situation currently
6213 occurs. A more general fix might be to reload the
6214 *value* instead of the address, but this would not
6215 be expected by the callers of this routine as-is.
6217 If find_reloads_address already completely replaced
6218 the address, there is nothing further to do. */
6219 if (reloaded == 0
6220 && reg_equiv_mem (regno) != 0
6221 && !strict_memory_address_addr_space_p
6222 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6223 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6225 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6226 base_reg_class (GET_MODE (tem),
6227 MEM_ADDR_SPACE (tem),
6228 MEM, SCRATCH),
6229 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6230 opnum, type);
6231 reloaded = 1;
6233 /* If this is not a toplevel operand, find_reloads doesn't see
6234 this substitution. We have to emit a USE of the pseudo so
6235 that delete_output_reload can see it. */
6236 if (replace_reloads && recog_data.operand[opnum] != x)
6237 /* We mark the USE with QImode so that we recognize it
6238 as one that can be safely deleted at the end of
6239 reload. */
6240 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6241 SUBREG_REG (x)),
6242 insn), QImode);
6243 x = tem;
6247 if (address_reloaded)
6248 *address_reloaded = reloaded;
6250 return x;
6253 /* Substitute into the current INSN the registers into which we have reloaded
6254 the things that need reloading. The array `replacements'
6255 contains the locations of all pointers that must be changed
6256 and says what to replace them with.
6258 Return the rtx that X translates into; usually X, but modified. */
6260 void
6261 subst_reloads (rtx insn)
6263 int i;
6265 for (i = 0; i < n_replacements; i++)
6267 struct replacement *r = &replacements[i];
6268 rtx reloadreg = rld[r->what].reg_rtx;
6269 if (reloadreg)
6271 #ifdef DEBUG_RELOAD
6272 /* This checking takes a very long time on some platforms
6273 causing the gcc.c-torture/compile/limits-fnargs.c test
6274 to time out during testing. See PR 31850.
6276 Internal consistency test. Check that we don't modify
6277 anything in the equivalence arrays. Whenever something from
6278 those arrays needs to be reloaded, it must be unshared before
6279 being substituted into; the equivalence must not be modified.
6280 Otherwise, if the equivalence is used after that, it will
6281 have been modified, and the thing substituted (probably a
6282 register) is likely overwritten and not a usable equivalence. */
6283 int check_regno;
6285 for (check_regno = 0; check_regno < max_regno; check_regno++)
6287 #define CHECK_MODF(ARRAY) \
6288 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6289 || !loc_mentioned_in_p (r->where, \
6290 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6292 CHECK_MODF (equiv_constant);
6293 CHECK_MODF (equiv_memory_loc);
6294 CHECK_MODF (equiv_address);
6295 CHECK_MODF (equiv_mem);
6296 #undef CHECK_MODF
6298 #endif /* DEBUG_RELOAD */
6300 /* If we're replacing a LABEL_REF with a register, there must
6301 already be an indication (to e.g. flow) which label this
6302 register refers to. */
6303 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6304 || !JUMP_P (insn)
6305 || find_reg_note (insn,
6306 REG_LABEL_OPERAND,
6307 XEXP (*r->where, 0))
6308 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6310 /* Encapsulate RELOADREG so its machine mode matches what
6311 used to be there. Note that gen_lowpart_common will
6312 do the wrong thing if RELOADREG is multi-word. RELOADREG
6313 will always be a REG here. */
6314 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6315 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6317 *r->where = reloadreg;
6319 /* If reload got no reg and isn't optional, something's wrong. */
6320 else
6321 gcc_assert (rld[r->what].optional);
6325 /* Make a copy of any replacements being done into X and move those
6326 copies to locations in Y, a copy of X. */
6328 void
6329 copy_replacements (rtx x, rtx y)
6331 copy_replacements_1 (&x, &y, n_replacements);
6334 static void
6335 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6337 int i, j;
6338 rtx x, y;
6339 struct replacement *r;
6340 enum rtx_code code;
6341 const char *fmt;
6343 for (j = 0; j < orig_replacements; j++)
6344 if (replacements[j].where == px)
6346 r = &replacements[n_replacements++];
6347 r->where = py;
6348 r->what = replacements[j].what;
6349 r->mode = replacements[j].mode;
6352 x = *px;
6353 y = *py;
6354 code = GET_CODE (x);
6355 fmt = GET_RTX_FORMAT (code);
6357 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6359 if (fmt[i] == 'e')
6360 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6361 else if (fmt[i] == 'E')
6362 for (j = XVECLEN (x, i); --j >= 0; )
6363 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6364 orig_replacements);
6368 /* Change any replacements being done to *X to be done to *Y. */
6370 void
6371 move_replacements (rtx *x, rtx *y)
6373 int i;
6375 for (i = 0; i < n_replacements; i++)
6376 if (replacements[i].where == x)
6377 replacements[i].where = y;
6380 /* If LOC was scheduled to be replaced by something, return the replacement.
6381 Otherwise, return *LOC. */
6383 rtx
6384 find_replacement (rtx *loc)
6386 struct replacement *r;
6388 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6390 rtx reloadreg = rld[r->what].reg_rtx;
6392 if (reloadreg && r->where == loc)
6394 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6395 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6397 return reloadreg;
6399 else if (reloadreg && GET_CODE (*loc) == SUBREG
6400 && r->where == &SUBREG_REG (*loc))
6402 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6403 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6405 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6406 GET_MODE (SUBREG_REG (*loc)),
6407 SUBREG_BYTE (*loc));
6411 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6412 what's inside and make a new rtl if so. */
6413 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6414 || GET_CODE (*loc) == MULT)
6416 rtx x = find_replacement (&XEXP (*loc, 0));
6417 rtx y = find_replacement (&XEXP (*loc, 1));
6419 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6420 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6423 return *loc;
6426 /* Return nonzero if register in range [REGNO, ENDREGNO)
6427 appears either explicitly or implicitly in X
6428 other than being stored into (except for earlyclobber operands).
6430 References contained within the substructure at LOC do not count.
6431 LOC may be zero, meaning don't ignore anything.
6433 This is similar to refers_to_regno_p in rtlanal.c except that we
6434 look at equivalences for pseudos that didn't get hard registers. */
6436 static int
6437 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6438 rtx x, rtx *loc)
6440 int i;
6441 unsigned int r;
6442 RTX_CODE code;
6443 const char *fmt;
6445 if (x == 0)
6446 return 0;
6448 repeat:
6449 code = GET_CODE (x);
6451 switch (code)
6453 case REG:
6454 r = REGNO (x);
6456 /* If this is a pseudo, a hard register must not have been allocated.
6457 X must therefore either be a constant or be in memory. */
6458 if (r >= FIRST_PSEUDO_REGISTER)
6460 if (reg_equiv_memory_loc (r))
6461 return refers_to_regno_for_reload_p (regno, endregno,
6462 reg_equiv_memory_loc (r),
6463 (rtx*) 0);
6465 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6466 return 0;
6469 return (endregno > r
6470 && regno < r + (r < FIRST_PSEUDO_REGISTER
6471 ? hard_regno_nregs[r][GET_MODE (x)]
6472 : 1));
6474 case SUBREG:
6475 /* If this is a SUBREG of a hard reg, we can see exactly which
6476 registers are being modified. Otherwise, handle normally. */
6477 if (REG_P (SUBREG_REG (x))
6478 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6480 unsigned int inner_regno = subreg_regno (x);
6481 unsigned int inner_endregno
6482 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6483 ? subreg_nregs (x) : 1);
6485 return endregno > inner_regno && regno < inner_endregno;
6487 break;
6489 case CLOBBER:
6490 case SET:
6491 if (&SET_DEST (x) != loc
6492 /* Note setting a SUBREG counts as referring to the REG it is in for
6493 a pseudo but not for hard registers since we can
6494 treat each word individually. */
6495 && ((GET_CODE (SET_DEST (x)) == SUBREG
6496 && loc != &SUBREG_REG (SET_DEST (x))
6497 && REG_P (SUBREG_REG (SET_DEST (x)))
6498 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6499 && refers_to_regno_for_reload_p (regno, endregno,
6500 SUBREG_REG (SET_DEST (x)),
6501 loc))
6502 /* If the output is an earlyclobber operand, this is
6503 a conflict. */
6504 || ((!REG_P (SET_DEST (x))
6505 || earlyclobber_operand_p (SET_DEST (x)))
6506 && refers_to_regno_for_reload_p (regno, endregno,
6507 SET_DEST (x), loc))))
6508 return 1;
6510 if (code == CLOBBER || loc == &SET_SRC (x))
6511 return 0;
6512 x = SET_SRC (x);
6513 goto repeat;
6515 default:
6516 break;
6519 /* X does not match, so try its subexpressions. */
6521 fmt = GET_RTX_FORMAT (code);
6522 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6524 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6526 if (i == 0)
6528 x = XEXP (x, 0);
6529 goto repeat;
6531 else
6532 if (refers_to_regno_for_reload_p (regno, endregno,
6533 XEXP (x, i), loc))
6534 return 1;
6536 else if (fmt[i] == 'E')
6538 int j;
6539 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6540 if (loc != &XVECEXP (x, i, j)
6541 && refers_to_regno_for_reload_p (regno, endregno,
6542 XVECEXP (x, i, j), loc))
6543 return 1;
6546 return 0;
6549 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6550 we check if any register number in X conflicts with the relevant register
6551 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6552 contains a MEM (we don't bother checking for memory addresses that can't
6553 conflict because we expect this to be a rare case).
6555 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6556 that we look at equivalences for pseudos that didn't get hard registers. */
6558 int
6559 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6561 int regno, endregno;
6563 /* Overly conservative. */
6564 if (GET_CODE (x) == STRICT_LOW_PART
6565 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6566 x = XEXP (x, 0);
6568 /* If either argument is a constant, then modifying X can not affect IN. */
6569 if (CONSTANT_P (x) || CONSTANT_P (in))
6570 return 0;
6571 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6572 return refers_to_mem_for_reload_p (in);
6573 else if (GET_CODE (x) == SUBREG)
6575 regno = REGNO (SUBREG_REG (x));
6576 if (regno < FIRST_PSEUDO_REGISTER)
6577 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6578 GET_MODE (SUBREG_REG (x)),
6579 SUBREG_BYTE (x),
6580 GET_MODE (x));
6581 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6582 ? subreg_nregs (x) : 1);
6584 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6586 else if (REG_P (x))
6588 regno = REGNO (x);
6590 /* If this is a pseudo, it must not have been assigned a hard register.
6591 Therefore, it must either be in memory or be a constant. */
6593 if (regno >= FIRST_PSEUDO_REGISTER)
6595 if (reg_equiv_memory_loc (regno))
6596 return refers_to_mem_for_reload_p (in);
6597 gcc_assert (reg_equiv_constant (regno));
6598 return 0;
6601 endregno = END_HARD_REGNO (x);
6603 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6605 else if (MEM_P (x))
6606 return refers_to_mem_for_reload_p (in);
6607 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6608 || GET_CODE (x) == CC0)
6609 return reg_mentioned_p (x, in);
6610 else
6612 gcc_assert (GET_CODE (x) == PLUS);
6614 /* We actually want to know if X is mentioned somewhere inside IN.
6615 We must not say that (plus (sp) (const_int 124)) is in
6616 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6617 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6618 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6619 while (MEM_P (in))
6620 in = XEXP (in, 0);
6621 if (REG_P (in))
6622 return 0;
6623 else if (GET_CODE (in) == PLUS)
6624 return (rtx_equal_p (x, in)
6625 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6626 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6627 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6628 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6631 gcc_unreachable ();
6634 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6635 registers. */
6637 static int
6638 refers_to_mem_for_reload_p (rtx x)
6640 const char *fmt;
6641 int i;
6643 if (MEM_P (x))
6644 return 1;
6646 if (REG_P (x))
6647 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6648 && reg_equiv_memory_loc (REGNO (x)));
6650 fmt = GET_RTX_FORMAT (GET_CODE (x));
6651 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6652 if (fmt[i] == 'e'
6653 && (MEM_P (XEXP (x, i))
6654 || refers_to_mem_for_reload_p (XEXP (x, i))))
6655 return 1;
6657 return 0;
6660 /* Check the insns before INSN to see if there is a suitable register
6661 containing the same value as GOAL.
6662 If OTHER is -1, look for a register in class RCLASS.
6663 Otherwise, just see if register number OTHER shares GOAL's value.
6665 Return an rtx for the register found, or zero if none is found.
6667 If RELOAD_REG_P is (short *)1,
6668 we reject any hard reg that appears in reload_reg_rtx
6669 because such a hard reg is also needed coming into this insn.
6671 If RELOAD_REG_P is any other nonzero value,
6672 it is a vector indexed by hard reg number
6673 and we reject any hard reg whose element in the vector is nonnegative
6674 as well as any that appears in reload_reg_rtx.
6676 If GOAL is zero, then GOALREG is a register number; we look
6677 for an equivalent for that register.
6679 MODE is the machine mode of the value we want an equivalence for.
6680 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6682 This function is used by jump.c as well as in the reload pass.
6684 If GOAL is the sum of the stack pointer and a constant, we treat it
6685 as if it were a constant except that sp is required to be unchanging. */
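/* Illustration (schematic): if GOAL is (mem:SI (plus (reg sp)
   (const_int 16))) and an earlier insn was (set (reg:SI 3) GOAL), with
   neither reg 3 nor that stack slot modified in between, the function
   returns (reg:SI 3) so that value can be reused instead of reloading
   GOAL again.  */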
6687 rtx
6688 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6689 short *reload_reg_p, int goalreg, enum machine_mode mode)
6691 rtx p = insn;
6692 rtx goaltry, valtry, value, where;
6693 rtx pat;
6694 int regno = -1;
6695 int valueno;
6696 int goal_mem = 0;
6697 int goal_const = 0;
6698 int goal_mem_addr_varies = 0;
6699 int need_stable_sp = 0;
6700 int nregs;
6701 int valuenregs;
6702 int num = 0;
  if (goal == 0)
    regno = goalreg;
  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
    {
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
        return 0;
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
        return 0;
      /* An address with side effects must be reexecuted.  */
      switch (code)
        {
        case POST_INC:
        case PRE_INC:
        case POST_DEC:
        case PRE_DEC:
        case POST_MODIFY:
        case PRE_MODIFY:
          return 0;
        default:
          break;
        }
      goal_mem = 1;
    }
  else if (CONSTANT_P (goal))
    goal_const = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == stack_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == frame_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = 1;
  else
    return 0;

  num = 0;
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */

  while (1)
    {
      p = PREV_INSN (p);
      if (p && DEBUG_INSN_P (p))
        continue;
      num++;
      if (p == 0 || LABEL_P (p)
          || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
        return 0;

      /* Don't reuse register contents from before a setjmp-type
         function call; on the second return (from the longjmp) it
         might have been clobbered by a later reuse.  It doesn't
         seem worthwhile to actually go and see if it is actually
         reused even if that information would be readily available;
         just don't reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
        return 0;

      if (NONJUMP_INSN_P (p)
          /* If we don't want spill regs ...  */
          && (! (reload_reg_p != 0
                 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
              /* ... then ignore insns introduced by reload; they aren't
                 useful and can cause results in reload_as_needed to be
                 different from what they were when calculating the need for
                 spills.  If we notice an input-reload insn here, we will
                 reject it below, but it might hide a usable equivalent.
                 That makes bad code.  It may even fail: perhaps no reg was
                 spilled for this insn because it was assumed we would find
                 that equivalent.  */
              || INSN_UID (p) < reload_first_uid))
        {
          rtx tem;
          pat = single_set (p);

          /* First check for something that sets some reg equal to GOAL.  */
          if (pat != 0
              && ((regno >= 0
                   && true_regnum (SET_SRC (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  ||
                  (regno >= 0
                   && true_regnum (SET_DEST (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
                  ||
                  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
                   /* When looking for stack pointer + const,
                      make sure we don't use a stack adjust.  */
                   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
                  /* If we are looking for a constant,
                     and something equivalent to that constant was copied
                     into a reg, we can use that reg.  */
                  || (goal_const && REG_NOTES (p) != 0
                      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
                      && ((rtx_equal_p (XEXP (tem, 0), goal)
                           && (valueno
                               = true_regnum (valtry = SET_DEST (pat))) >= 0)
                          || (REG_P (SET_DEST (pat))
                              && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
                              && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                              && CONST_INT_P (goal)
                              && 0 != (goaltry
                                       = operand_subword (XEXP (tem, 0), 0, 0,
                                                          VOIDmode))
                              && rtx_equal_p (goal, goaltry)
                              && (valtry
                                  = operand_subword (SET_DEST (pat), 0, 0,
                                                     VOIDmode))
                              && (valueno = true_regnum (valtry)) >= 0)))
                  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
                                                          NULL_RTX))
                      && REG_P (SET_DEST (pat))
                      && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
                      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                      && CONST_INT_P (goal)
                      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
                                                          VOIDmode))
                      && rtx_equal_p (goal, goaltry)
                      && (valtry
                          = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
                      && (valueno = true_regnum (valtry)) >= 0)))
            {
              if (other >= 0)
                {
                  if (valueno != other)
                    continue;
                }
              else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
                continue;
              else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
                                           mode, valueno))
                continue;
              value = valtry;
              where = p;
              break;
            }
        }
    }
  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
    return 0;

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
                                                          goal)))
    need_stable_sp = 1;

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)
    return 0;

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
                                       goal, (rtx*) 0))
    return 0;

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  else
    nregs = 1;
  valuenregs = hard_regno_nregs[valueno][mode];

  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)
    return 0;

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
    {
      int i;
      for (i = 0; i < valuenregs; ++i)
        if (reload_reg_p[valueno + i] >= 0)
          return 0;
    }

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
    {
      int i;
      for (i = 0; i < n_reloads; i++)
        if (rld[i].reg_rtx != 0 && rld[i].in)
          {
            int regno1 = REGNO (rld[i].reg_rtx);
            int nregs1 = hard_regno_nregs[regno1][GET_MODE (rld[i].reg_rtx)];
            if (regno1 < valueno + valuenregs
                && regno1 + nregs1 > valueno)
              return 0;
          }
    }

  if (goal_mem)
    /* We must treat frame pointer as varying here,
       since it can vary--in a nonlocal goto as generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));

  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  */
  p = insn;
  while (1)
    {
      p = PREV_INSN (p);
      if (p == where)
        return value;

      /* Don't trust the conversion past a function call
         if either of the two is in a call-clobbered register, or memory.  */
      if (CALL_P (p))
        {
          int i;

          if (goal_mem || need_stable_sp)
            return 0;

          if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < nregs; ++i)
              if (call_used_regs[regno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
                return 0;

          if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < valuenregs; ++i)
              if (call_used_regs[valueno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
                return 0;
        }

      if (INSN_P (p))
        {
          pat = PATTERN (p);

          /* Watch out for unspec_volatile, and volatile asms.  */
          if (volatile_insn_p (pat))
            return 0;

          /* If this insn P stores in either GOAL or VALUE, return 0.
             If GOAL is a memory ref and this insn writes memory, return 0.
             If GOAL is a memory ref and its address is not constant,
             and this insn P changes a register used in GOAL, return 0.  */

          if (GET_CODE (pat) == COND_EXEC)
            pat = COND_EXEC_CODE (pat);
          if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
            {
              rtx dest = SET_DEST (pat);
              while (GET_CODE (dest) == SUBREG
                     || GET_CODE (dest) == ZERO_EXTRACT
                     || GET_CODE (dest) == STRICT_LOW_PART)
                dest = XEXP (dest, 0);
              if (REG_P (dest))
                {
                  int xregno = REGNO (dest);
                  int xnregs;
                  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                  else
                    xnregs = 1;
                  if (xregno < regno + nregs && xregno + xnregs > regno)
                    return 0;
                  if (xregno < valueno + valuenregs
                      && xregno + xnregs > valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (dest, goal))
                    return 0;
                  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                    return 0;
                }
              else if (goal_mem && MEM_P (dest)
                       && ! push_operand (dest, GET_MODE (dest)))
                return 0;
              else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                       && reg_equiv_memory_loc (regno) != 0)
                return 0;
              else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
                return 0;
            }
          else if (GET_CODE (pat) == PARALLEL)
            {
              int i;
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  rtx v1 = XVECEXP (pat, 0, i);
                  if (GET_CODE (v1) == COND_EXEC)
                    v1 = COND_EXEC_CODE (v1);
                  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
                    {
                      rtx dest = SET_DEST (v1);
                      while (GET_CODE (dest) == SUBREG
                             || GET_CODE (dest) == ZERO_EXTRACT
                             || GET_CODE (dest) == STRICT_LOW_PART)
                        dest = XEXP (dest, 0);
                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs;
                          if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                            xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                          else
                            xnregs = 1;
                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          if (xregno < valueno + valuenregs
                              && xregno + xnregs > valueno)
                            return 0;
                          if (goal_mem_addr_varies
                              && reg_overlap_mentioned_for_reload_p (dest,
                                                                     goal))
                            return 0;
                          if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                            return 0;
                        }
                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                               && reg_equiv_memory_loc (regno) != 0)
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

          if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
            {
              rtx link;

              for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
                   link = XEXP (link, 1))
                {
                  pat = XEXP (link, 0);
                  if (GET_CODE (pat) == CLOBBER)
                    {
                      rtx dest = SET_DEST (pat);

                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs
                            = hard_regno_nregs[xregno][GET_MODE (dest)];

                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          else if (xregno < valueno + valuenregs
                                   && xregno + xnregs > valueno)
                            return 0;
                          else if (goal_mem_addr_varies
                                   && reg_overlap_mentioned_for_reload_p
                                        (dest, goal))
                            return 0;
                        }
                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

#ifdef AUTO_INC_DEC
          /* If this insn auto-increments or auto-decrements
             either regno or valueno, return 0 now.
             If GOAL is a memory ref and its address is not constant,
             and this insn P increments a register used in GOAL, return 0.  */
          {
            rtx link;

            for (link = REG_NOTES (p); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_INC
                  && REG_P (XEXP (link, 0)))
                {
                  int incno = REGNO (XEXP (link, 0));
                  if (incno < regno + nregs && incno >= regno)
                    return 0;
                  if (incno < valueno + valuenregs && incno >= valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
                                                             goal))
                    return 0;
                }
          }
#endif
        }
    }
}
/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

static int
find_inc_amount (rtx x, rtx inced)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i;

  if (code == MEM)
    {
      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
           || GET_CODE (addr) == POST_DEC
           || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_INC)
          && XEXP (addr, 0) == inced)
        return GET_MODE_SIZE (GET_MODE (x));
      else if ((GET_CODE (addr) == PRE_MODIFY
                || GET_CODE (addr) == POST_MODIFY)
               && GET_CODE (XEXP (addr, 1)) == PLUS
               && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
               && XEXP (addr, 0) == inced
               && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
        {
          i = INTVAL (XEXP (XEXP (addr, 1), 1));
          return i < 0 ? -i : i;
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          int tem = find_inc_amount (XEXP (x, i), inced);
          if (tem != 0)
            return tem;
        }
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              int tem = find_inc_amount (XVECEXP (x, i, j), inced);
              if (tem != 0)
                return tem;
            }
        }
    }

  return 0;
}
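
/* For instance (an illustrative sketch): if X contains
   (mem:SI (post_inc:SI (reg:SI 3))) and INCED is that (reg:SI 3), the
   MEM case above returns GET_MODE_SIZE (SImode), i.e. 4 on a typical
   32-bit target; a (post_modify (reg 3) (plus (reg 3) (const_int -8)))
   address would instead return 8, the absolute value of the constant.  */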
/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

#ifdef AUTO_INC_DEC
static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
                           rtx insn)
{
  rtx link;

  gcc_assert (insn);

  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        unsigned int test = (int) REGNO (XEXP (link, 0));
        if (test >= regno && test < endregno)
          return 1;
      }
  return 0;
}

#else

#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

#endif
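
/* For example (a sketch): reg_inc_found_and_valid_p (3, 4, insn) is
   nonzero exactly when INSN carries a REG_INC note whose register
   number is 3.  */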
/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
                   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

      for (; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          if ((GET_CODE (elt) == CLOBBER
               || (sets == 1 && GET_CODE (elt) == SET))
              && REG_P (XEXP (elt, 0)))
            {
              unsigned int test = REGNO (XEXP (elt, 0));

              if (test >= regno && test < endregno)
                return 1;
            }
          if (sets == 2
              && reg_inc_found_and_valid_p (regno, endregno, elt))
            return 1;
        }
    }

  return 0;
}
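
/* For example (an illustrative sketch): if INSN's pattern is
   (clobber (reg:SI 3)), then regno_clobbered_p (3, insn, SImode, 0)
   returns nonzero, since the clobbered register number falls within
   REGNO .. REGNO + hard_regno_nregs[REGNO][MODE] - 1.  */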
/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
rtx
reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
{
  int regno;

  if (GET_MODE (reloadreg) == mode)
    return reloadreg;

  regno = REGNO (reloadreg);

  if (REG_WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
             - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
}
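
/* For example (a sketch, assuming a target where REG_WORDS_BIG_ENDIAN
   holds and DImode occupies two hard registers): asking for the SImode
   low part of (reg:DI 10) yields (reg:SI 11), whereas with
   little-endian word order it is simply (reg:SI 10).  */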
static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};
/* These functions are used to print the variables set by `find_reloads'.  */

DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
        {
          fprintf (f, "reload_in (%s) = ",
                   GET_MODE_NAME (rld[r].inmode));
          print_inline_rtx (f, rld[r].in, 24);
          fprintf (f, "\n\t");
        }

      if (rld[r].out != 0)
        {
          fprintf (f, "reload_out (%s) = ",
                   GET_MODE_NAME (rld[r].outmode));
          print_inline_rtx (f, rld[r].out, 24);
          fprintf (f, "\n\t");
        }

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
               reload_when_needed_name[(int) rld[r].when_needed],
               rld[r].opnum);

      if (rld[r].optional)
        fprintf (f, ", optional");

      if (rld[r].nongroup)
        fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
        fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
        fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
        fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
        {
          fprintf (f, "\n\treload_in_reg: ");
          print_inline_rtx (f, rld[r].in_reg, 24);
        }

      if (rld[r].out_reg != 0)
        {
          fprintf (f, "\n\treload_out_reg: ");
          print_inline_rtx (f, rld[r].out_reg, 24);
        }

      if (rld[r].reg_rtx != 0)
        {
          fprintf (f, "\n\treload_reg_rtx: ");
          print_inline_rtx (f, rld[r].reg_rtx, 24);
        }

      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
        {
          fprintf (f, "%ssecondary_in_reload = %d",
                   prefix, rld[r].secondary_in_reload);
          prefix = ", ";
        }

      if (rld[r].secondary_out_reload != -1)
        fprintf (f, "%ssecondary_out_reload = %d\n",
                 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
        {
          fprintf (f, "%ssecondary_in_icode = %s", prefix,
                   insn_data[rld[r].secondary_in_icode].name);
          prefix = ", ";
        }

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
        fprintf (f, "%ssecondary_out_icode = %s", prefix,
                 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}
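
/* These dump routines are meant for interactive use from a debugger;
   e.g. "call debug_reload ()" under gdb prints the current rld[] array
   to stderr.  */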
DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);