PR c++/33916
[official-gcc.git] / gcc / reload.c
blob d880097a164634d1f107c921cd0e4f4238baf921
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
58 NOTE SIDE EFFECTS:
60 find_reloads can alter the operands of the instruction it is called on.
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
74 Using a reload register for several reloads in one insn:
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
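/* Illustrative example, not part of the original source: in an insn such as
   (set (mem:SI (reg:SI pseudo_a)) (mem:SI (reg:SI pseudo_b))) where neither
   pseudo received a hard register, the reload of pseudo_b is needed only to
   form the input address (RELOAD_FOR_INPUT_ADDRESS) and the reload of
   pseudo_a only to form the output address (RELOAD_FOR_OUTPUT_ADDRESS);
   since they serve different parts of the insn they may end up sharing one
   reload register, whereas a pseudo reloaded as an ordinary operand would be
   RELOAD_OTHER and could not share.  */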
88 #define REG_OK_STRICT
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "reload.h"
104 #include "regs.h"
105 #include "addresses.h"
106 #include "hard-reg-set.h"
107 #include "flags.h"
108 #include "real.h"
109 #include "output.h"
110 #include "function.h"
111 #include "toplev.h"
112 #include "params.h"
113 #include "target.h"
114 #include "df.h"
116 /* True if X is a constant that can be forced into the constant pool. */
117 #define CONST_POOL_OK_P(X) \
118 (CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (X))
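/* Illustrative note, not in the original source: a (symbol_ref "x") or
   (const_double ...) normally satisfies CONST_POOL_OK_P and can be forced
   into the constant pool, while (high (symbol_ref "x")) does not, since
   HIGH is only a piece of an address; targetm.cannot_force_const_mem lets a
   back end veto particular constants, for example TLS symbols on some
   targets.  */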
122 /* True if C is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
124 #define SMALL_REGISTER_CLASS_P(C) \
125 (reg_class_size [(C)] == 1 \
126 || (reg_class_size [(C)] >= 1 && CLASS_LIKELY_SPILLED_P (C)))
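/* Illustrative note, not in the original source: this is true either for a
   class containing exactly one hard register (a dedicated counter or flags
   register, for instance) or for any non-empty class the target marks with
   CLASS_LIKELY_SPILLED_P.  */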
129 /* All reloads of the current insn are recorded here. See reload.h for
130 comments. */
131 int n_reloads;
132 struct reload rld[MAX_RELOADS];
134 /* All the "earlyclobber" operands of the current insn
135 are recorded here. */
136 int n_earlyclobbers;
137 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 int reload_n_operands;
141 /* Replacing reloads.
143 If `replace_reloads' is nonzero, then as each reload is recorded
144 an entry is made for it in the table `replacements'.
145 Then later `subst_reloads' can look through that table and
146 perform all the replacements needed. */
148 /* Nonzero means record the places to replace. */
149 static int replace_reloads;
151 /* Each replacement is recorded with a structure like this. */
152 struct replacement
154 rtx *where; /* Location to store in */
155 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
156 a SUBREG; 0 otherwise. */
157 int what; /* which reload this is for */
158 enum machine_mode mode; /* mode it must have */
161 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
163 /* Number of replacements currently recorded. */
164 static int n_replacements;
166 /* Used to track what is modified by an operand. */
167 struct decomposition
169 int reg_flag; /* Nonzero if referencing a register. */
170 int safe; /* Nonzero if this can't conflict with anything. */
171 rtx base; /* Base address for MEM. */
172 HOST_WIDE_INT start; /* Starting offset or register number. */
173 HOST_WIDE_INT end; /* Ending offset or register number. */
176 #ifdef SECONDARY_MEMORY_NEEDED
178 /* Save MEMs needed to copy from one class of registers to another. One MEM
179 is used per mode, but normally only one or two modes are ever used.
181 We keep two versions, before and after register elimination. The one
182 after register elimination is recorded separately for each operand. This
183 is done in case the address is not valid, to be sure that we reload
184 each one separately. */
186 static rtx secondary_memlocs[NUM_MACHINE_MODES];
187 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
188 static int secondary_memlocs_elim_used = 0;
189 #endif
191 /* The instruction we are doing reloads for;
192 so we can test whether a register dies in it. */
193 static rtx this_insn;
195 /* Nonzero if this instruction is a user-specified asm with operands. */
196 static int this_insn_is_asm;
198 /* If hard_regs_live_known is nonzero,
199 we can tell which hard regs are currently live,
200 at least enough to succeed in choosing dummy reloads. */
201 static int hard_regs_live_known;
203 /* Indexed by hard reg number,
204 element is nonnegative if hard reg has been spilled.
205 This vector is passed to `find_reloads' as an argument
206 and is not changed here. */
207 static short *static_reload_reg_p;
209 /* Set to 1 in subst_reg_equivs if it changes anything. */
210 static int subst_reg_equivs_changed;
212 /* On return from push_reload, holds the reload-number for the OUT
213 operand, which can be different from the one for the input operand. */
214 static int output_reloadnum;
216 /* Compare two RTX's. */
217 #define MATCHES(x, y) \
218 (x == y || (x != 0 && (REG_P (x) \
219 ? REG_P (y) && REGNO (x) == REGNO (y) \
220 : rtx_equal_p (x, y) && ! side_effects_p (x))))
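/* Illustrative note, not in the original source: two REG rtx's "match" when
   they name the same register number even if their modes differ, e.g.
   (reg:SI 3) and (reg:HI 3); anything else must be rtx_equal_p and free of
   side effects, so two distinct but identical-looking (post_inc ...)
   expressions do not match.  */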
222 /* Indicates if two reload purposes are for similar enough things that we
223 can merge their reloads. */
224 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
225 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
226 || ((when1) == (when2) && (op1) == (op2)) \
227 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
228 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
229 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
230 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
231 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
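/* Illustrative note, not in the original source: RELOAD_OTHER merges with
   anything; two RELOAD_FOR_INPUT (or two RELOAD_FOR_OPERAND_ADDRESS, or two
   RELOAD_FOR_OTHER_ADDRESS) reloads merge regardless of operand number; any
   other pair merges only when both the type and the operand number agree.
   MERGE_TO_OTHER below then says whether the merged reload must become
   RELOAD_OTHER.  */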
233 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
234 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
235 ((when1) != (when2) \
236 || ! ((op1) == (op2) \
237 || (when1) == RELOAD_FOR_INPUT \
238 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
239 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
241 /* If we are going to reload an address, compute the reload type to
242 use. */
243 #define ADDR_TYPE(type) \
244 ((type) == RELOAD_FOR_INPUT_ADDRESS \
245 ? RELOAD_FOR_INPADDR_ADDRESS \
246 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
247 ? RELOAD_FOR_OUTADDR_ADDRESS \
248 : (type)))
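/* Illustrative note, not in the original source: reloading something that is
   itself part of an address reload moves one level inward, so
   RELOAD_FOR_INPUT_ADDRESS becomes RELOAD_FOR_INPADDR_ADDRESS and
   RELOAD_FOR_OUTPUT_ADDRESS becomes RELOAD_FOR_OUTADDR_ADDRESS; every other
   reload type is passed through unchanged.  */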
250 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
251 enum machine_mode, enum reload_type,
252 enum insn_code *, secondary_reload_info *);
253 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
254 int, unsigned int);
255 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, enum reg_class, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_p (enum machine_mode, rtx, rtx *);
271 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
272 int, enum reload_type, int, rtx);
273 static rtx subst_reg_equivs (rtx, rtx);
274 static rtx subst_indexed_address (rtx);
275 static void update_auto_inc_notes (rtx, int, int);
276 static int find_reloads_address_1 (enum machine_mode, rtx, int,
277 enum rtx_code, enum rtx_code, rtx *,
278 int, enum reload_type,int, rtx);
279 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
280 enum machine_mode, int,
281 enum reload_type, int);
282 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
283 int, rtx);
284 static void copy_replacements_1 (rtx *, rtx *, int);
285 static int find_inc_amount (rtx, rtx);
286 static int refers_to_mem_for_reload_p (rtx);
287 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
288 rtx, rtx *);
290 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
291 list yet. */
293 static void
294 push_reg_equiv_alt_mem (int regno, rtx mem)
296 rtx it;
298 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
299 if (rtx_equal_p (XEXP (it, 0), mem))
300 return;
302 reg_equiv_alt_mem_list [regno]
303 = alloc_EXPR_LIST (REG_EQUIV, mem,
304 reg_equiv_alt_mem_list [regno]);
307 /* Determine if any secondary reloads are needed for loading (if IN_P is
308 nonzero) or storing (if IN_P is zero) X to or from a reload register of
309 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
310 are needed, push them.
312 Return the reload number of the secondary reload we made, or -1 if
313 we didn't need one. *PICODE is set to the insn_code to use if we do
314 need a secondary reload. */
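/* Illustrative example, not part of the original source: on a target where
   an FP register class cannot be loaded directly from certain memory
   addresses, targetm.secondary_reload might answer that moving such a MEM
   into that class needs an intermediate general register; this function then
   pushes that intermediate reload (and, recursively, any reload it needs in
   turn), and the caller records the returned reload number in the
   secondary_in_reload or secondary_out_reload field of the original
   reload.  */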
316 static int
317 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
318 enum reg_class reload_class,
319 enum machine_mode reload_mode, enum reload_type type,
320 enum insn_code *picode, secondary_reload_info *prev_sri)
322 enum reg_class class = NO_REGS;
323 enum reg_class scratch_class;
324 enum machine_mode mode = reload_mode;
325 enum insn_code icode = CODE_FOR_nothing;
326 enum insn_code t_icode = CODE_FOR_nothing;
327 enum reload_type secondary_type;
328 int s_reload, t_reload = -1;
329 const char *scratch_constraint;
330 char letter;
331 secondary_reload_info sri;
333 if (type == RELOAD_FOR_INPUT_ADDRESS
334 || type == RELOAD_FOR_OUTPUT_ADDRESS
335 || type == RELOAD_FOR_INPADDR_ADDRESS
336 || type == RELOAD_FOR_OUTADDR_ADDRESS)
337 secondary_type = type;
338 else
339 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341 *picode = CODE_FOR_nothing;
343 /* If X is a paradoxical SUBREG, use the inner value to determine both the
344 mode and object being reloaded. */
345 if (GET_CODE (x) == SUBREG
346 && (GET_MODE_SIZE (GET_MODE (x))
347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
349 x = SUBREG_REG (x);
350 reload_mode = GET_MODE (x);
353 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
354 is still a pseudo-register by now, it *must* have an equivalent MEM
355 but we don't want to assume that), use that equivalent when seeing if
356 a secondary reload is needed since whether or not a reload is needed
357 might be sensitive to the form of the MEM. */
359 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
360 && reg_equiv_mem[REGNO (x)] != 0)
361 x = reg_equiv_mem[REGNO (x)];
363 sri.icode = CODE_FOR_nothing;
364 sri.prev_sri = prev_sri;
365 class = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
366 icode = sri.icode;
368 /* If we don't need any secondary registers, done. */
369 if (class == NO_REGS && icode == CODE_FOR_nothing)
370 return -1;
372 if (class != NO_REGS)
373 t_reload = push_secondary_reload (in_p, x, opnum, optional, class,
374 reload_mode, type, &t_icode, &sri);
376 /* If we will be using an insn, the secondary reload is for a
377 scratch register. */
379 if (icode != CODE_FOR_nothing)
381 /* If IN_P is nonzero, the reload register will be the output in
382 operand 0. If IN_P is zero, the reload register will be the input
383 in operand 1. Outputs should have an initial "=", which we must
384 skip. */
386 /* ??? It would be useful to be able to handle only two, or more than
387 three, operands, but for now we can only handle the case of having
388 exactly three: output, input and one temp/scratch. */
389 gcc_assert (insn_data[(int) icode].n_operands == 3);
391 /* ??? We currently have no way to represent a reload that needs
392 an icode to reload from an intermediate tertiary reload register.
393 We should probably have a new field in struct reload to tag a
394 chain of scratch operand reloads onto. */
395 gcc_assert (class == NO_REGS);
397 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
398 gcc_assert (*scratch_constraint == '=');
399 scratch_constraint++;
400 if (*scratch_constraint == '&')
401 scratch_constraint++;
402 letter = *scratch_constraint;
403 scratch_class = (letter == 'r' ? GENERAL_REGS
404 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
405 scratch_constraint));
407 class = scratch_class;
408 mode = insn_data[(int) icode].operand[2].mode;
411 /* This case isn't valid, so fail. Reload is allowed to use the same
412 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
413 in the case of a secondary register, we actually need two different
414 registers for correct code. We fail here to prevent the possibility of
415 silently generating incorrect code later.
417 The convention is that secondary input reloads are valid only if the
418 secondary_class is different from class. If you have such a case, you
419 cannot use secondary reloads; you must work around the problem some
420 other way.
422 Allow this when a reload_in/out pattern is being used. I.e. assume
423 that the generated code handles this case. */
425 gcc_assert (!in_p || class != reload_class || icode != CODE_FOR_nothing
426 || t_icode != CODE_FOR_nothing);
428 /* See if we can reuse an existing secondary reload. */
429 for (s_reload = 0; s_reload < n_reloads; s_reload++)
430 if (rld[s_reload].secondary_p
431 && (reg_class_subset_p (class, rld[s_reload].class)
432 || reg_class_subset_p (rld[s_reload].class, class))
433 && ((in_p && rld[s_reload].inmode == mode)
434 || (! in_p && rld[s_reload].outmode == mode))
435 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
436 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
437 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
438 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
439 && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
440 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
441 opnum, rld[s_reload].opnum))
443 if (in_p)
444 rld[s_reload].inmode = mode;
445 if (! in_p)
446 rld[s_reload].outmode = mode;
448 if (reg_class_subset_p (class, rld[s_reload].class))
449 rld[s_reload].class = class;
451 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
452 rld[s_reload].optional &= optional;
453 rld[s_reload].secondary_p = 1;
454 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
455 opnum, rld[s_reload].opnum))
456 rld[s_reload].when_needed = RELOAD_OTHER;
459 if (s_reload == n_reloads)
461 #ifdef SECONDARY_MEMORY_NEEDED
462 /* If we need a memory location to copy between the two reload regs,
463 set it up now. Note that we do the input case before making
464 the reload and the output case after. This is due to the
465 way reloads are output. */
467 if (in_p && icode == CODE_FOR_nothing
468 && SECONDARY_MEMORY_NEEDED (class, reload_class, mode))
470 get_secondary_mem (x, reload_mode, opnum, type);
472 /* We may have just added new reloads. Make sure we add
473 the new reload at the end. */
474 s_reload = n_reloads;
476 #endif
478 /* We need to make a new secondary reload for this register class. */
479 rld[s_reload].in = rld[s_reload].out = 0;
480 rld[s_reload].class = class;
482 rld[s_reload].inmode = in_p ? mode : VOIDmode;
483 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
484 rld[s_reload].reg_rtx = 0;
485 rld[s_reload].optional = optional;
486 rld[s_reload].inc = 0;
487 /* Maybe we could combine these, but it seems too tricky. */
488 rld[s_reload].nocombine = 1;
489 rld[s_reload].in_reg = 0;
490 rld[s_reload].out_reg = 0;
491 rld[s_reload].opnum = opnum;
492 rld[s_reload].when_needed = secondary_type;
493 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
494 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
495 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
496 rld[s_reload].secondary_out_icode
497 = ! in_p ? t_icode : CODE_FOR_nothing;
498 rld[s_reload].secondary_p = 1;
500 n_reloads++;
502 #ifdef SECONDARY_MEMORY_NEEDED
503 if (! in_p && icode == CODE_FOR_nothing
504 && SECONDARY_MEMORY_NEEDED (reload_class, class, mode))
505 get_secondary_mem (x, mode, opnum, type);
506 #endif
509 *picode = icode;
510 return s_reload;
513 /* If a secondary reload is needed, return its class. If both an intermediate
514 register and a scratch register are needed, we return the class of the
515 intermediate register. */
516 enum reg_class
517 secondary_reload_class (bool in_p, enum reg_class class,
518 enum machine_mode mode, rtx x)
520 enum insn_code icode;
521 secondary_reload_info sri;
523 sri.icode = CODE_FOR_nothing;
524 sri.prev_sri = NULL;
525 class = targetm.secondary_reload (in_p, x, class, mode, &sri);
526 icode = sri.icode;
528 /* If there are no secondary reloads at all, we return NO_REGS.
529 If an intermediate register is needed, we return its class. */
530 if (icode == CODE_FOR_nothing || class != NO_REGS)
531 return class;
533 /* No intermediate register is needed, but we have a special reload
534 pattern, which we assume for now needs a scratch register. */
535 return scratch_reload_class (icode);
538 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
539 three operands, verify that operand 2 is an output operand, and return
540 its register class.
541 ??? We'd like to be able to handle any pattern with at least 2 operands,
542 for zero or more scratch registers, but that needs more infrastructure. */
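/* Illustrative example, not part of the original source: for a reload
   pattern whose operand 2 constraint string is "=&r", the leading '=' and
   the optional '&' (earlyclobber) are skipped and the remaining 'r' yields
   GENERAL_REGS; any other constraint letter is mapped through
   REG_CLASS_FROM_CONSTRAINT.  */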
543 enum reg_class
544 scratch_reload_class (enum insn_code icode)
546 const char *scratch_constraint;
547 char scratch_letter;
548 enum reg_class class;
550 gcc_assert (insn_data[(int) icode].n_operands == 3);
551 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
552 gcc_assert (*scratch_constraint == '=');
553 scratch_constraint++;
554 if (*scratch_constraint == '&')
555 scratch_constraint++;
556 scratch_letter = *scratch_constraint;
557 if (scratch_letter == 'r')
558 return GENERAL_REGS;
559 class = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
560 scratch_constraint);
561 gcc_assert (class != NO_REGS);
562 return class;
565 #ifdef SECONDARY_MEMORY_NEEDED
567 /* Return a memory location that will be used to copy X in mode MODE.
568 If we haven't already made a location for this mode in this insn,
569 call find_reloads_address on the location being returned. */
rtx
572 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
573 int opnum, enum reload_type type)
575 rtx loc;
576 int mem_valid;
578 /* By default, if MODE is narrower than a word, widen it to a word.
579 This is required because most machines that require these memory
580 locations do not support short loads and stores from all registers
581 (e.g., FP registers). */
583 #ifdef SECONDARY_MEMORY_NEEDED_MODE
584 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
585 #else
586 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
587 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
588 #endif
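/* Illustrative note, not in the original source: on a 32-bit target without
   SECONDARY_MEMORY_NEEDED_MODE, a QImode or HImode copy is widened to SImode
   here, so a single word-sized stack slot can serve all the narrow integer
   modes.  */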
590 /* If we already have made a MEM for this operand in MODE, return it. */
591 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
592 return secondary_memlocs_elim[(int) mode][opnum];
594 /* If this is the first time we've tried to get a MEM for this mode,
595 allocate a new one. `something_changed' in reload will get set
596 by noticing that the frame size has changed. */
598 if (secondary_memlocs[(int) mode] == 0)
600 #ifdef SECONDARY_MEMORY_NEEDED_RTX
601 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
602 #else
603 secondary_memlocs[(int) mode]
604 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
605 #endif
608 /* Get a version of the address doing any eliminations needed. If that
609 didn't give us a new MEM, make a new one if it isn't valid. */
611 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
612 mem_valid = strict_memory_address_p (mode, XEXP (loc, 0));
614 if (! mem_valid && loc == secondary_memlocs[(int) mode])
615 loc = copy_rtx (loc);
617 /* The only time the call below will do anything is if the stack
618 offset is too large. In that case IND_LEVELS doesn't matter, so we
619 can just pass a zero. Adjust the type to be the address of the
620 corresponding object. If the address was valid, save the eliminated
621 address. If it wasn't valid, we need to make a reload each time, so
622 don't save it. */
624 if (! mem_valid)
626 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
627 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
628 : RELOAD_OTHER);
630 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
631 opnum, type, 0, 0);
634 secondary_memlocs_elim[(int) mode][opnum] = loc;
635 if (secondary_memlocs_elim_used <= (int)mode)
636 secondary_memlocs_elim_used = (int)mode + 1;
637 return loc;
640 /* Clear any secondary memory locations we've made. */
642 void
643 clear_secondary_mem (void)
645 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
647 #endif /* SECONDARY_MEMORY_NEEDED */
650 /* Find the largest class which has at least one register valid in
651 mode INNER, and which for every such register, that register number
652 plus N is also valid in OUTER (if in range) and is cheap to move
653 into REGNO. Such a class must exist. */
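/* Illustrative example, not part of the original source: if INNER is DImode,
   N is 1 and OUTER is SImode, a class qualifies only when every register in
   it that can hold DImode also has its successor in the class and that
   successor can hold SImode; among the qualifying classes, the largest one
   that is no more expensive to move into REGNO's class is preferred.  */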
655 static enum reg_class
656 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
657 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
658 unsigned int dest_regno ATTRIBUTE_UNUSED)
660 int best_cost = -1;
661 int class;
662 int regno;
663 enum reg_class best_class = NO_REGS;
664 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
665 unsigned int best_size = 0;
666 int cost;
668 for (class = 1; class < N_REG_CLASSES; class++)
670 int bad = 0;
671 int good = 0;
672 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
673 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno))
675 if (HARD_REGNO_MODE_OK (regno, inner))
677 good = 1;
678 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno + n)
679 || ! HARD_REGNO_MODE_OK (regno + n, outer))
680 bad = 1;
684 if (bad || !good)
685 continue;
686 cost = REGISTER_MOVE_COST (outer, class, dest_class);
688 if ((reg_class_size[class] > best_size
689 && (best_cost < 0 || best_cost >= cost))
690 || best_cost > cost)
692 best_class = class;
693 best_size = reg_class_size[class];
694 best_cost = REGISTER_MOVE_COST (outer, class, dest_class);
698 gcc_assert (best_size != 0);
700 return best_class;
703 /* Return the number of a previously made reload that can be combined with
704 a new one, or n_reloads if none of the existing reloads can be used.
705 OUT, CLASS, TYPE and OPNUM are the same arguments as passed to
706 push_reload, they determine the kind of the new reload that we try to
707 combine. P_IN points to the corresponding value of IN, which can be
708 modified by this function.
709 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
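/* Illustrative note, not in the original source: when the tests below are
   all satisfied, a request for the same value in a compatible class simply
   returns the index of the earlier rld[] entry instead of creating a new
   one; the second loop additionally lets a plain register request reuse an
   existing reload of a (post_inc ...) or (pre_inc ...) of that same
   register, substituting the autoincrement expression back through P_IN.  */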
711 static int
712 find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
713 enum reload_type type, int opnum, int dont_share)
715 rtx in = *p_in;
716 int i;
717 /* We can't merge two reloads if the output of either one is
718 earlyclobbered. */
720 if (earlyclobber_operand_p (out))
721 return n_reloads;
723 /* We can use an existing reload if the class is right
724 and at least one of IN and OUT is a match
725 and the other is at worst neutral.
726 (A zero compared against anything is neutral.)
728 If SMALL_REGISTER_CLASSES, don't use existing reloads unless they are
729 for the same thing since that can cause us to need more reload registers
730 than we otherwise would. */
732 for (i = 0; i < n_reloads; i++)
733 if ((reg_class_subset_p (class, rld[i].class)
734 || reg_class_subset_p (rld[i].class, class))
735 /* If the existing reload has a register, it must fit our class. */
736 && (rld[i].reg_rtx == 0
737 || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
738 true_regnum (rld[i].reg_rtx)))
739 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
740 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
741 || (out != 0 && MATCHES (rld[i].out, out)
742 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
743 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
744 && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
745 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
746 return i;
748 /* Reloading a plain reg for input can match a reload to postincrement
749 that reg, since the postincrement's value is the right value.
750 Likewise, it can match a preincrement reload, since we regard
751 the preincrementation as happening before any ref in this insn
752 to that register. */
753 for (i = 0; i < n_reloads; i++)
754 if ((reg_class_subset_p (class, rld[i].class)
755 || reg_class_subset_p (rld[i].class, class))
756 /* If the existing reload has a register, it must fit our
757 class. */
758 && (rld[i].reg_rtx == 0
759 || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
760 true_regnum (rld[i].reg_rtx)))
761 && out == 0 && rld[i].out == 0 && rld[i].in != 0
762 && ((REG_P (in)
763 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
764 && MATCHES (XEXP (rld[i].in, 0), in))
765 || (REG_P (rld[i].in)
766 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
767 && MATCHES (XEXP (in, 0), rld[i].in)))
768 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
769 && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
770 && MERGABLE_RELOADS (type, rld[i].when_needed,
771 opnum, rld[i].opnum))
773 /* Make sure reload_in ultimately has the increment,
774 not the plain register. */
775 if (REG_P (in))
776 *p_in = rld[i].in;
777 return i;
779 return n_reloads;
782 /* Return nonzero if X is a SUBREG which will require reloading of its
783 SUBREG_REG expression. */
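/* Illustrative example, not part of the original source: (subreg:SI
   (reg:DF <hard-fp-reg>) 0) needs its inner register reloaded when SImode is
   not valid for that hard register in the subreg's position, and a
   word-sized output subreg of a multi-word hard register whose register
   count does not match its word count is reported as needing an inner reload
   as well.  */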
785 static int
786 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
788 rtx inner;
790 /* Only SUBREGs are problematical. */
791 if (GET_CODE (x) != SUBREG)
792 return 0;
794 inner = SUBREG_REG (x);
796 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
797 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
798 return 1;
800 /* If INNER is not a hard register, then INNER will not need to
801 be reloaded. */
802 if (!REG_P (inner)
803 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
804 return 0;
806 /* If INNER is not ok for MODE, then INNER will need reloading. */
807 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
808 return 1;
810 /* If the outer part is a word or smaller, INNER is larger than a
811 word, and the number of regs for INNER is not the same as the
812 number of words in INNER, then INNER will need reloading. */
813 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
814 && output
815 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
816 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
817 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
820 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
821 requiring an extra reload register. The caller has already found that
822 IN contains some reference to REGNO, so check that we can produce the
823 new value in a single step. E.g. if we have
824 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
825 instruction that adds one to a register, this should succeed.
826 However, if we have something like
827 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
828 needs to be loaded into a register first, we need a separate reload
829 register.
830 Such PLUS reloads are generated by find_reloads_address_part.
831 The out-of-range PLUS expressions are usually introduced in the instruction
832 patterns by register elimination and substituting pseudos without a home
833 by their function-invariant equivalences. */
834 static int
835 can_reload_into (rtx in, int regno, enum machine_mode mode)
837 rtx dst, test_insn;
838 int r = 0;
839 struct recog_data save_recog_data;
841 /* For matching constraints, we often get notional input reloads where
842 we want to use the original register as the reload register. I.e.
843 technically this is a non-optional input-output reload, but IN is
844 already a valid register, and has been chosen as the reload register.
845 Speed this up, since it trivially works. */
846 if (REG_P (in))
847 return 1;
849 /* To test MEMs properly, we'd have to take into account all the reloads
850 that are already scheduled, which can become quite complicated.
851 And since we've already handled address reloads for this MEM, it
852 should always succeed anyway. */
853 if (MEM_P (in))
854 return 1;
856 /* If we can make a simple SET insn that does the job, everything should
857 be fine. */
858 dst = gen_rtx_REG (mode, regno);
859 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
860 save_recog_data = recog_data;
861 if (recog_memoized (test_insn) >= 0)
863 extract_insn (test_insn);
864 r = constrain_operands (1);
866 recog_data = save_recog_data;
867 return r;
870 /* Record one reload that needs to be performed.
871 IN is an rtx saying where the data are to be found before this instruction.
872 OUT says where they must be stored after the instruction.
873 (IN is zero for data not read, and OUT is zero for data not written.)
874 INLOC and OUTLOC point to the places in the instructions where
875 IN and OUT were found.
876 If IN and OUT are both nonzero, it means the same register must be used
877 to reload both IN and OUT.
879 CLASS is a register class required for the reloaded data.
880 INMODE is the machine mode that the instruction requires
881 for the reg that replaces IN and OUTMODE is likewise for OUT.
883 If IN is zero, then OUT's location and mode should be passed as
884 INLOC and INMODE.
886 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
888 OPTIONAL nonzero means this reload does not need to be performed:
889 it can be discarded if that is more convenient.
891 OPNUM and TYPE say what the purpose of this reload is.
893 The return value is the reload-number for this reload.
895 If both IN and OUT are nonzero, in some rare cases we might
896 want to make two separate reloads. (Actually we never do this now.)
897 Therefore, the reload-number for OUT is stored in
898 output_reloadnum when we return; the return value applies to IN.
899 Usually (presently always), when IN and OUT are nonzero,
900 the two reload-numbers are equal, but the caller should be careful to
901 distinguish them. */
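/* Illustrative example, not part of the original source: for an insn whose
   operand 1 is (reg:SI 70) with constraint "r" where pseudo 70 got no hard
   register, find_reloads would make a call something like

     push_reload (recog_data.operand[1], NULL_RTX, recog_data.operand_loc[1],
                  (rtx *) 0, GENERAL_REGS, SImode, VOIDmode, 0, 0, 1,
                  RELOAD_FOR_INPUT);

   and the returned index identifies the rld[] entry that reload1.c later
   assigns a spill register to.  */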
int
904 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
905 enum reg_class class, enum machine_mode inmode,
906 enum machine_mode outmode, int strict_low, int optional,
907 int opnum, enum reload_type type)
909 int i;
910 int dont_share = 0;
911 int dont_remove_subreg = 0;
912 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
913 int secondary_in_reload = -1, secondary_out_reload = -1;
914 enum insn_code secondary_in_icode = CODE_FOR_nothing;
915 enum insn_code secondary_out_icode = CODE_FOR_nothing;
917 /* INMODE and/or OUTMODE could be VOIDmode if no mode
918 has been specified for the operand. In that case,
919 use the operand's mode as the mode to reload. */
920 if (inmode == VOIDmode && in != 0)
921 inmode = GET_MODE (in);
922 if (outmode == VOIDmode && out != 0)
923 outmode = GET_MODE (out);
925 /* If find_reloads and friends have until now failed to replace a pseudo
926 with a constant from reg_equiv_constant, something went wrong
927 beforehand.
928 Note that it can't simply be done here if we missed it earlier
929 since the constant might need to be pushed into the literal pool
930 and the resulting memref would probably need further
931 reloading. */
932 if (in != 0 && REG_P (in))
934 int regno = REGNO (in);
936 gcc_assert (regno < FIRST_PSEUDO_REGISTER
937 || reg_renumber[regno] >= 0
938 || reg_equiv_constant[regno] == NULL_RTX);
941 /* reg_equiv_constant only contains constants which are obviously
942 not appropriate as destination. So if we would need to replace
943 the destination pseudo with a constant we are in real
944 trouble. */
945 if (out != 0 && REG_P (out))
947 int regno = REGNO (out);
949 gcc_assert (regno < FIRST_PSEUDO_REGISTER
950 || reg_renumber[regno] >= 0
951 || reg_equiv_constant[regno] == NULL_RTX);
954 /* If we have a read-write operand with an address side-effect,
955 change either IN or OUT so the side-effect happens only once. */
956 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
957 switch (GET_CODE (XEXP (in, 0)))
959 case POST_INC: case POST_DEC: case POST_MODIFY:
960 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
961 break;
963 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
964 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
965 break;
967 default:
968 break;
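/* Illustrative note, not in the original source: for a read-write operand
   such as (mem:SI (post_inc:SI (reg:SI 2))) appearing as both IN and OUT,
   the code above rewrites IN to use plain (reg:SI 2) so that only the store
   performs the post-increment, keeping the side effect from happening
   twice.  */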
971 /* If we are reloading a (SUBREG constant ...), really reload just the
972 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
973 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
974 a pseudo and hence will become a MEM) with M1 wider than M2 and the
975 register is a pseudo, also reload the inside expression.
976 For machines that extend byte loads, do this for any SUBREG of a pseudo
977 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
978 M2 is an integral mode that gets extended when loaded.
979 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
980 either M1 is not valid for R or M2 is wider than a word but we only
981 need one word to store an M2-sized quantity in R.
982 (However, if OUT is nonzero, we need to reload the reg *and*
983 the subreg, so do nothing here, and let following statement handle it.)
985 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
986 we can't handle it here because CONST_INT does not indicate a mode.
988 Similarly, we must reload the inside expression if we have a
989 STRICT_LOW_PART (presumably, in == out in that case).
991 Also reload the inner expression if it does not require a secondary
992 reload but the SUBREG does.
994 Finally, reload the inner expression if it is a register that is in
995 the class whose registers cannot be referenced in a different size
996 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
997 cannot reload just the inside since we might end up with the wrong
998 register class. But if it is inside a STRICT_LOW_PART, we have
999 no choice, so we hope we do get the right register class there. */
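/* Illustrative example, not part of the original source: given a paradoxical
   (subreg:SI (reg:QI 70) 0) where pseudo 70 did not get a hard register, the
   test below strips the SUBREG and reloads just the inner QImode value in
   QImode, adjusting INLOC so the later replacement lands on the inner
   expression.  */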
1001 if (in != 0 && GET_CODE (in) == SUBREG
1002 && (subreg_lowpart_p (in) || strict_low)
1003 #ifdef CANNOT_CHANGE_MODE_CLASS
1004 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, class)
1005 #endif
1006 && (CONSTANT_P (SUBREG_REG (in))
1007 || GET_CODE (SUBREG_REG (in)) == PLUS
1008 || strict_low
1009 || (((REG_P (SUBREG_REG (in))
1010 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1011 || MEM_P (SUBREG_REG (in)))
1012 && ((GET_MODE_SIZE (inmode)
1013 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1014 #ifdef LOAD_EXTEND_OP
1015 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1016 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1017 <= UNITS_PER_WORD)
1018 && (GET_MODE_SIZE (inmode)
1019 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1020 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1021 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1022 #endif
1023 #ifdef WORD_REGISTER_OPERATIONS
1024 || ((GET_MODE_SIZE (inmode)
1025 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1026 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1027 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1028 / UNITS_PER_WORD)))
1029 #endif
1031 || (REG_P (SUBREG_REG (in))
1032 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1033 /* The case where out is nonzero
1034 is handled differently in the following statement. */
1035 && (out == 0 || subreg_lowpart_p (in))
1036 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1037 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1038 > UNITS_PER_WORD)
1039 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1040 / UNITS_PER_WORD)
1041 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1042 [GET_MODE (SUBREG_REG (in))]))
1043 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1044 || (secondary_reload_class (1, class, inmode, in) != NO_REGS
1045 && (secondary_reload_class (1, class, GET_MODE (SUBREG_REG (in)),
1046 SUBREG_REG (in))
1047 == NO_REGS))
1048 #ifdef CANNOT_CHANGE_MODE_CLASS
1049 || (REG_P (SUBREG_REG (in))
1050 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1051 && REG_CANNOT_CHANGE_MODE_P
1052 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1053 #endif
1056 in_subreg_loc = inloc;
1057 inloc = &SUBREG_REG (in);
1058 in = *inloc;
1059 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1060 if (MEM_P (in))
1061 /* This is supposed to happen only for paradoxical subregs made by
1062 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1063 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1064 #endif
1065 inmode = GET_MODE (in);
1068 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1069 either M1 is not valid for R or M2 is wider than a word but we only
1070 need one word to store an M2-sized quantity in R.
1072 However, we must reload the inner reg *as well as* the subreg in
1073 that case. */
1075 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1076 code above. This can happen if SUBREG_BYTE != 0. */
1078 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1080 enum reg_class in_class = class;
1082 if (REG_P (SUBREG_REG (in)))
1083 in_class
1084 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1085 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1086 GET_MODE (SUBREG_REG (in)),
1087 SUBREG_BYTE (in),
1088 GET_MODE (in)),
1089 REGNO (SUBREG_REG (in)));
1091 /* This relies on the fact that emit_reload_insns outputs the
1092 instructions for input reloads of type RELOAD_OTHER in the same
1093 order as the reloads. Thus if the outer reload is also of type
1094 RELOAD_OTHER, we are guaranteed that this inner reload will be
1095 output before the outer reload. */
1096 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1097 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1098 dont_remove_subreg = 1;
1101 /* Similarly for paradoxical and problematical SUBREGs on the output.
1102 Note that there is no reason we need worry about the previous value
1103 of SUBREG_REG (out); even if wider than out,
1104 storing in a subreg is entitled to clobber it all
1105 (except in the case of STRICT_LOW_PART,
1106 and in that case the constraint should label it input-output.) */
1107 if (out != 0 && GET_CODE (out) == SUBREG
1108 && (subreg_lowpart_p (out) || strict_low)
1109 #ifdef CANNOT_CHANGE_MODE_CLASS
1110 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, class)
1111 #endif
1112 && (CONSTANT_P (SUBREG_REG (out))
1113 || strict_low
1114 || (((REG_P (SUBREG_REG (out))
1115 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1116 || MEM_P (SUBREG_REG (out)))
1117 && ((GET_MODE_SIZE (outmode)
1118 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1119 #ifdef WORD_REGISTER_OPERATIONS
1120 || ((GET_MODE_SIZE (outmode)
1121 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1122 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1123 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1124 / UNITS_PER_WORD)))
1125 #endif
1127 || (REG_P (SUBREG_REG (out))
1128 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1129 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1130 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1131 > UNITS_PER_WORD)
1132 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1133 / UNITS_PER_WORD)
1134 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1135 [GET_MODE (SUBREG_REG (out))]))
1136 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1137 || (secondary_reload_class (0, class, outmode, out) != NO_REGS
1138 && (secondary_reload_class (0, class, GET_MODE (SUBREG_REG (out)),
1139 SUBREG_REG (out))
1140 == NO_REGS))
1141 #ifdef CANNOT_CHANGE_MODE_CLASS
1142 || (REG_P (SUBREG_REG (out))
1143 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1144 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1145 GET_MODE (SUBREG_REG (out)),
1146 outmode))
1147 #endif
1150 out_subreg_loc = outloc;
1151 outloc = &SUBREG_REG (out);
1152 out = *outloc;
1153 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1154 gcc_assert (!MEM_P (out)
1155 || GET_MODE_SIZE (GET_MODE (out))
1156 <= GET_MODE_SIZE (outmode));
1157 #endif
1158 outmode = GET_MODE (out);
1161 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1162 either M1 is not valid for R or M2 is wider than a word but we only
1163 need one word to store an M2-sized quantity in R.
1165 However, we must reload the inner reg *as well as* the subreg in
1166 that case. In this case, the inner reg is an in-out reload. */
1168 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1170 /* This relies on the fact that emit_reload_insns outputs the
1171 instructions for output reloads of type RELOAD_OTHER in reverse
1172 order of the reloads. Thus if the outer reload is also of type
1173 RELOAD_OTHER, we are guaranteed that this inner reload will be
1174 output after the outer reload. */
1175 dont_remove_subreg = 1;
1176 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1177 &SUBREG_REG (out),
1178 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1179 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1180 GET_MODE (SUBREG_REG (out)),
1181 SUBREG_BYTE (out),
1182 GET_MODE (out)),
1183 REGNO (SUBREG_REG (out))),
1184 VOIDmode, VOIDmode, 0, 0,
1185 opnum, RELOAD_OTHER);
1188 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1189 if (in != 0 && out != 0 && MEM_P (out)
1190 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1191 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1192 dont_share = 1;
1194 /* If IN is a SUBREG of a hard register, make a new REG. This
1195 simplifies some of the cases below. */
1197 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1198 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1199 && ! dont_remove_subreg)
1200 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1202 /* Similarly for OUT. */
1203 if (out != 0 && GET_CODE (out) == SUBREG
1204 && REG_P (SUBREG_REG (out))
1205 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1206 && ! dont_remove_subreg)
1207 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1209 /* Narrow down the class of register wanted if that is
1210 desirable on this machine for efficiency. */
1212 enum reg_class preferred_class = class;
1214 if (in != 0)
1215 preferred_class = PREFERRED_RELOAD_CLASS (in, class);
1217 /* Output reloads may need analogous treatment, different in detail. */
1218 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1219 if (out != 0)
1220 preferred_class = PREFERRED_OUTPUT_RELOAD_CLASS (out, preferred_class);
1221 #endif
1223 /* Discard what the target said if we cannot do it. */
1224 if (preferred_class != NO_REGS
1225 || (optional && type == RELOAD_FOR_OUTPUT))
1226 class = preferred_class;
1229 /* Make sure we use a class that can handle the actual pseudo
1230 inside any subreg. For example, on the 386, QImode regs
1231 can appear within SImode subregs. Although GENERAL_REGS
1232 can handle SImode, QImode needs a smaller class. */
1233 #ifdef LIMIT_RELOAD_CLASS
1234 if (in_subreg_loc)
1235 class = LIMIT_RELOAD_CLASS (inmode, class);
1236 else if (in != 0 && GET_CODE (in) == SUBREG)
1237 class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), class);
1239 if (out_subreg_loc)
1240 class = LIMIT_RELOAD_CLASS (outmode, class);
1241 if (out != 0 && GET_CODE (out) == SUBREG)
1242 class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), class);
1243 #endif
1245 /* Verify that this class is at least possible for the mode that
1246 is specified. */
1247 if (this_insn_is_asm)
1249 enum machine_mode mode;
1250 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1251 mode = inmode;
1252 else
1253 mode = outmode;
1254 if (mode == VOIDmode)
1256 error_for_asm (this_insn, "cannot reload integer constant "
1257 "operand in %<asm%>");
1258 mode = word_mode;
1259 if (in != 0)
1260 inmode = word_mode;
1261 if (out != 0)
1262 outmode = word_mode;
1264 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1265 if (HARD_REGNO_MODE_OK (i, mode)
1266 && in_hard_reg_set_p (reg_class_contents[(int) class], mode, i))
1267 break;
1268 if (i == FIRST_PSEUDO_REGISTER)
1270 error_for_asm (this_insn, "impossible register constraint "
1271 "in %<asm%>");
1272 /* Avoid further trouble with this insn. */
1273 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1274 /* We used to continue here setting class to ALL_REGS, but it triggers
1275 a sanity check on i386 for:
1276 void foo(long double d)
1278 asm("" :: "a" (d));
1280 Returning zero here ought to be safe, as we take care in
1281 find_reloads not to process the reloads when the instruction was
1282 replaced by USE. */
1284 return 0;
1288 /* Optional output reloads are always OK even if we have no register class,
1289 since the function of these reloads is only to have spill_reg_store etc.
1290 set, so that the storing insn can be deleted later. */
1291 gcc_assert (class != NO_REGS
1292 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1294 i = find_reusable_reload (&in, out, class, type, opnum, dont_share);
1296 if (i == n_reloads)
1298 /* See if we need a secondary reload register to move between CLASS
1299 and IN or CLASS and OUT. Get the icode and push any required reloads
1300 needed for each of them if so. */
1302 if (in != 0)
1303 secondary_in_reload
1304 = push_secondary_reload (1, in, opnum, optional, class, inmode, type,
1305 &secondary_in_icode, NULL);
1306 if (out != 0 && GET_CODE (out) != SCRATCH)
1307 secondary_out_reload
1308 = push_secondary_reload (0, out, opnum, optional, class, outmode,
1309 type, &secondary_out_icode, NULL);
1311 /* We found no existing reload suitable for re-use.
1312 So add an additional reload. */
1314 #ifdef SECONDARY_MEMORY_NEEDED
1315 /* If a memory location is needed for the copy, make one. */
1316 if (in != 0
1317 && (REG_P (in)
1318 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1319 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1320 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1321 class, inmode))
1322 get_secondary_mem (in, inmode, opnum, type);
1323 #endif
1325 i = n_reloads;
1326 rld[i].in = in;
1327 rld[i].out = out;
1328 rld[i].class = class;
1329 rld[i].inmode = inmode;
1330 rld[i].outmode = outmode;
1331 rld[i].reg_rtx = 0;
1332 rld[i].optional = optional;
1333 rld[i].inc = 0;
1334 rld[i].nocombine = 0;
1335 rld[i].in_reg = inloc ? *inloc : 0;
1336 rld[i].out_reg = outloc ? *outloc : 0;
1337 rld[i].opnum = opnum;
1338 rld[i].when_needed = type;
1339 rld[i].secondary_in_reload = secondary_in_reload;
1340 rld[i].secondary_out_reload = secondary_out_reload;
1341 rld[i].secondary_in_icode = secondary_in_icode;
1342 rld[i].secondary_out_icode = secondary_out_icode;
1343 rld[i].secondary_p = 0;
1345 n_reloads++;
1347 #ifdef SECONDARY_MEMORY_NEEDED
1348 if (out != 0
1349 && (REG_P (out)
1350 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1351 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1352 && SECONDARY_MEMORY_NEEDED (class,
1353 REGNO_REG_CLASS (reg_or_subregno (out)),
1354 outmode))
1355 get_secondary_mem (out, outmode, opnum, type);
1356 #endif
1358 else
1360 /* We are reusing an existing reload,
1361 but we may have additional information for it.
1362 For example, we may now have both IN and OUT
1363 while the old one may have just one of them. */
1365 /* The modes can be different. If they are, we want to reload in
1366 the larger mode, so that the value is valid for both modes. */
1367 if (inmode != VOIDmode
1368 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1369 rld[i].inmode = inmode;
1370 if (outmode != VOIDmode
1371 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1372 rld[i].outmode = outmode;
1373 if (in != 0)
1375 rtx in_reg = inloc ? *inloc : 0;
1376 /* If we merge reloads for two distinct rtl expressions that
1377 are identical in content, there might be duplicate address
1378 reloads. Remove the extra set now, so that if we later find
1379 that we can inherit this reload, we can get rid of the
1380 address reloads altogether.
1382 Do not do this if both reloads are optional since the result
1383 would be an optional reload which could potentially leave
1384 unresolved address replacements.
1386 It is not sufficient to call transfer_replacements since
1387 choose_reload_regs will remove the replacements for address
1388 reloads of inherited reloads which results in the same
1389 problem. */
1390 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1391 && ! (rld[i].optional && optional))
1393 /* We must keep the address reload with the lower operand
1394 number alive. */
1395 if (opnum > rld[i].opnum)
1397 remove_address_replacements (in);
1398 in = rld[i].in;
1399 in_reg = rld[i].in_reg;
1401 else
1402 remove_address_replacements (rld[i].in);
1404 rld[i].in = in;
1405 rld[i].in_reg = in_reg;
1407 if (out != 0)
1409 rld[i].out = out;
1410 rld[i].out_reg = outloc ? *outloc : 0;
1412 if (reg_class_subset_p (class, rld[i].class))
1413 rld[i].class = class;
1414 rld[i].optional &= optional;
1415 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1416 opnum, rld[i].opnum))
1417 rld[i].when_needed = RELOAD_OTHER;
1418 rld[i].opnum = MIN (rld[i].opnum, opnum);
1421 /* If the ostensible rtx being reloaded differs from the rtx found
1422 in the location to substitute, this reload is not safe to combine
1423 because we cannot reliably tell whether it appears in the insn. */
1425 if (in != 0 && in != *inloc)
1426 rld[i].nocombine = 1;
1428 #if 0
1429 /* This was replaced by changes in find_reloads_address_1 and the new
1430 function inc_for_reload, which go with a new meaning of reload_inc. */
1432 /* If this is an IN/OUT reload in an insn that sets the CC,
1433 it must be for an autoincrement. It doesn't work to store
1434 the incremented value after the insn because that would clobber the CC.
1435 So we must do the increment of the value reloaded from,
1436 increment it, store it back, then decrement again. */
1437 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1439 out = 0;
1440 rld[i].out = 0;
1441 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1442 /* If we did not find a nonzero amount-to-increment-by,
1443 that contradicts the belief that IN is being incremented
1444 in an address in this insn. */
1445 gcc_assert (rld[i].inc != 0);
1447 #endif
1449 /* If we will replace IN and OUT with the reload-reg,
1450 record where they are located so that substitution need
1451 not do a tree walk. */
1453 if (replace_reloads)
1455 if (inloc != 0)
1457 struct replacement *r = &replacements[n_replacements++];
1458 r->what = i;
1459 r->subreg_loc = in_subreg_loc;
1460 r->where = inloc;
1461 r->mode = inmode;
1463 if (outloc != 0 && outloc != inloc)
1465 struct replacement *r = &replacements[n_replacements++];
1466 r->what = i;
1467 r->where = outloc;
1468 r->subreg_loc = out_subreg_loc;
1469 r->mode = outmode;
1473 /* If this reload is just being introduced and it has both
1474 an incoming quantity and an outgoing quantity that are
1475 supposed to be made to match, see if either one of the two
1476 can serve as the place to reload into.
1478 If one of them is acceptable, set rld[i].reg_rtx
1479 to that one. */
1481 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1483 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1484 inmode, outmode,
1485 rld[i].class, i,
1486 earlyclobber_operand_p (out));
1488 /* If the outgoing register already contains the same value
1489 as the incoming one, we can dispense with loading it.
1490 The easiest way to tell the caller that is to give a phony
1491 value for the incoming operand (same as outgoing one). */
1492 if (rld[i].reg_rtx == out
1493 && (REG_P (in) || CONSTANT_P (in))
1494 && 0 != find_equiv_reg (in, this_insn, 0, REGNO (out),
1495 static_reload_reg_p, i, inmode))
1496 rld[i].in = out;
1499 /* If this is an input reload and the operand contains a register that
1500 dies in this insn and is used nowhere else, see if it is the right class
1501 to be used for this reload. Use it if so. (This occurs most commonly
1502 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1503 this if it is also an output reload that mentions the register unless
1504 the output is a SUBREG that clobbers an entire register.
1506 Note that the operand might be one of the spill regs, if it is a
1507 pseudo reg and we are in a block where spilling has not taken place.
1508 But if there is no spilling in this block, that is OK.
1509 An explicitly used hard reg cannot be a spill reg. */
1511 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1513 rtx note;
1514 int regno;
1515 enum machine_mode rel_mode = inmode;
1517 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1518 rel_mode = outmode;
1520 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1521 if (REG_NOTE_KIND (note) == REG_DEAD
1522 && REG_P (XEXP (note, 0))
1523 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1524 && reg_mentioned_p (XEXP (note, 0), in)
1525 /* Check that a former pseudo is valid; see find_dummy_reload. */
1526 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1527 || (!bitmap_bit_p (DF_LIVE_OUT (ENTRY_BLOCK_PTR),
1528 ORIGINAL_REGNO (XEXP (note, 0)))
1529 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1530 && ! refers_to_regno_for_reload_p (regno,
1531 end_hard_regno (rel_mode,
1532 regno),
1533 PATTERN (this_insn), inloc)
1534 /* If this is also an output reload, IN cannot be used as
1535 the reload register if it is set in this insn unless IN
1536 is also OUT. */
1537 && (out == 0 || in == out
1538 || ! hard_reg_set_here_p (regno,
1539 end_hard_regno (rel_mode, regno),
1540 PATTERN (this_insn)))
1541 /* ??? Why is this code so different from the previous?
1542 Is there any simple coherent way to describe the two together?
1543 What's going on here?  */
1544 && (in != out
1545 || (GET_CODE (in) == SUBREG
1546 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1547 / UNITS_PER_WORD)
1548 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1549 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1550 /* Make sure the operand fits in the reg that dies. */
1551 && (GET_MODE_SIZE (rel_mode)
1552 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1553 && HARD_REGNO_MODE_OK (regno, inmode)
1554 && HARD_REGNO_MODE_OK (regno, outmode))
1556 unsigned int offs;
1557 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1558 hard_regno_nregs[regno][outmode]);
1560 for (offs = 0; offs < nregs; offs++)
1561 if (fixed_regs[regno + offs]
1562 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1563 regno + offs))
1564 break;
1566 if (offs == nregs
1567 && (! (refers_to_regno_for_reload_p
1568 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1569 || can_reload_into (in, regno, inmode)))
1571 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1572 break;
1577 if (out)
1578 output_reloadnum = i;
1580 return i;
1583 /* Record an additional place we must replace a value
1584 for which we have already recorded a reload.
1585 RELOADNUM is the value returned by push_reload
1586 when the reload was recorded.
1587 This is used in insn patterns that use match_dup. */
1589 static void
1590 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1592 if (replace_reloads)
1594 struct replacement *r = &replacements[n_replacements++];
1595 r->what = reloadnum;
1596 r->where = loc;
1597 r->subreg_loc = 0;
1598 r->mode = mode;
1602 /* Duplicate any replacement we have recorded to apply at
1603 location ORIG_LOC to also be performed at DUP_LOC.
1604 This is used in insn patterns that use match_dup. */
1606 static void
1607 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1609 int i, n = n_replacements;
1611 for (i = 0; i < n; i++)
1613 struct replacement *r = &replacements[i];
1614 if (r->where == orig_loc)
1615 push_replacement (dup_loc, r->what, r->mode);
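/* Illustrative sketch, not part of the compiler: when an insn pattern uses
   (match_dup N), the same operand appears at two locations.  Assuming a
   caller that has already pushed reload R for the original location ORIG_LOC
   (R, ORIG_LOC and DUP_LOC are names made up for this example), the
   duplicate location can be given the same substitution either way:  */
#if 0
{
  /* Record the extra location directly against reload R ...  */
  push_replacement (dup_loc, r, GET_MODE (*dup_loc));
  /* ... or copy whatever replacements were already recorded at the
     original location.  */
  dup_replacements (dup_loc, orig_loc);
}
#endif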
1619 /* Transfer all replacements that used to be in reload FROM to be in
1620 reload TO. */
1622 void
1623 transfer_replacements (int to, int from)
1625 int i;
1627 for (i = 0; i < n_replacements; i++)
1628 if (replacements[i].what == from)
1629 replacements[i].what = to;
1632 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1633 or a subpart of it. If we have any replacements registered for IN_RTX,
1634 cancel the reloads that were supposed to load them.
1635 Return nonzero if we canceled any reloads. */
1636 int
1637 remove_address_replacements (rtx in_rtx)
1639 int i, j;
1640 char reload_flags[MAX_RELOADS];
1641 int something_changed = 0;
1643 memset (reload_flags, 0, sizeof reload_flags);
1644 for (i = 0, j = 0; i < n_replacements; i++)
1646 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1647 reload_flags[replacements[i].what] |= 1;
1648 else
1650 replacements[j++] = replacements[i];
1651 reload_flags[replacements[i].what] |= 2;
1654 /* Note that the following store must be done before the recursive calls. */
1655 n_replacements = j;
1657 for (i = n_reloads - 1; i >= 0; i--)
1659 if (reload_flags[i] == 1)
1661 deallocate_reload_reg (i);
1662 remove_address_replacements (rld[i].in);
1663 rld[i].in = 0;
1664 something_changed = 1;
1667 return something_changed;
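/* Illustrative note, not from the original sources: for each reload number
   the flag byte computed above ends up as
     1 -- every replacement for that reload lies inside IN_RTX,
     2 -- every replacement lies outside IN_RTX,
     3 -- some lie inside and some outside.
   Only reloads whose flag is exactly 1 are canceled, since those existed
   solely to load a part of IN_RTX that is now inherited.  */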
1670 /* If there is only one output reload, and it is not for an earlyclobber
1671 operand, try to combine it with a (logically unrelated) input reload
1672 to reduce the number of reload registers needed.
1674 This is safe if the input reload does not appear in
1675 the value being output-reloaded, because this implies
1676 it is not needed any more once the original insn completes.
1678 If that doesn't work, see if we can use any of the registers that
1679 die in this insn as a reload register. We can if it is of the right
1680 class and does not appear in the value being output-reloaded. */
1682 static void
1683 combine_reloads (void)
1685 int i, regno;
1686 int output_reload = -1;
1687 int secondary_out = -1;
1688 rtx note;
1690 /* Find the output reload; return unless there is exactly one
1691 and that one is mandatory. */
1693 for (i = 0; i < n_reloads; i++)
1694 if (rld[i].out != 0)
1696 if (output_reload >= 0)
1697 return;
1698 output_reload = i;
1701 if (output_reload < 0 || rld[output_reload].optional)
1702 return;
1704 /* An input-output reload isn't combinable. */
1706 if (rld[output_reload].in != 0)
1707 return;
1709 /* If this reload is for an earlyclobber operand, we can't do anything. */
1710 if (earlyclobber_operand_p (rld[output_reload].out))
1711 return;
1713 /* If there is a reload for part of the address of this operand, we would
1714 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1715 its life to the point where doing this combine would not lower the
1716 number of spill registers needed. */
1717 for (i = 0; i < n_reloads; i++)
1718 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1719 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1720 && rld[i].opnum == rld[output_reload].opnum)
1721 return;
1723 /* Check each input reload; can we combine it? */
1725 for (i = 0; i < n_reloads; i++)
1726 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1727 /* Life span of this reload must not extend past main insn. */
1728 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1729 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1730 && rld[i].when_needed != RELOAD_OTHER
1731 && (CLASS_MAX_NREGS (rld[i].class, rld[i].inmode)
1732 == CLASS_MAX_NREGS (rld[output_reload].class,
1733 rld[output_reload].outmode))
1734 && rld[i].inc == 0
1735 && rld[i].reg_rtx == 0
1736 #ifdef SECONDARY_MEMORY_NEEDED
1737 /* Don't combine two reloads with different secondary
1738 memory locations. */
1739 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1740 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1741 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1742 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1743 #endif
1744 && (SMALL_REGISTER_CLASSES
1745 ? (rld[i].class == rld[output_reload].class)
1746 : (reg_class_subset_p (rld[i].class,
1747 rld[output_reload].class)
1748 || reg_class_subset_p (rld[output_reload].class,
1749 rld[i].class)))
1750 && (MATCHES (rld[i].in, rld[output_reload].out)
1751 /* Args reversed because the first arg seems to be
1752 the one that we imagine being modified
1753 while the second is the one that might be affected. */
1754 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1755 rld[i].in)
1756 /* However, if the input is a register that appears inside
1757 the output, then we also can't share.
1758 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1759 If the same reload reg is used for both reg 69 and the
1760 result to be stored in memory, then that result
1761 will clobber the address of the memory ref. */
1762 && ! (REG_P (rld[i].in)
1763 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1764 rld[output_reload].out))))
1765 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1766 rld[i].when_needed != RELOAD_FOR_INPUT)
1767 && (reg_class_size[(int) rld[i].class]
1768 || SMALL_REGISTER_CLASSES)
1769 /* We will allow making things slightly worse by combining an
1770 input and an output, but no worse than that. */
1771 && (rld[i].when_needed == RELOAD_FOR_INPUT
1772 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1774 int j;
1776 /* We have found a reload to combine with! */
1777 rld[i].out = rld[output_reload].out;
1778 rld[i].out_reg = rld[output_reload].out_reg;
1779 rld[i].outmode = rld[output_reload].outmode;
1780 /* Mark the old output reload as inoperative. */
1781 rld[output_reload].out = 0;
1782 /* The combined reload is needed for the entire insn. */
1783 rld[i].when_needed = RELOAD_OTHER;
1784 /* If the output reload had a secondary reload, copy it. */
1785 if (rld[output_reload].secondary_out_reload != -1)
1787 rld[i].secondary_out_reload
1788 = rld[output_reload].secondary_out_reload;
1789 rld[i].secondary_out_icode
1790 = rld[output_reload].secondary_out_icode;
1793 #ifdef SECONDARY_MEMORY_NEEDED
1794 /* Copy any secondary MEM. */
1795 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1796 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1797 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1798 #endif
1799 /* If required, minimize the register class. */
1800 if (reg_class_subset_p (rld[output_reload].class,
1801 rld[i].class))
1802 rld[i].class = rld[output_reload].class;
1804 /* Transfer all replacements from the old reload to the combined. */
1805 for (j = 0; j < n_replacements; j++)
1806 if (replacements[j].what == output_reload)
1807 replacements[j].what = i;
1809 return;
1812 /* If this insn has only one operand that is modified or written (assumed
1813 to be the first), it must be the one corresponding to this reload. It
1814 is safe to use anything that dies in this insn for that output provided
1815 that it does not occur in the output (we already know it isn't an
1816 earlyclobber).  If this is an asm insn, give up. */
1818 if (INSN_CODE (this_insn) == -1)
1819 return;
1821 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1822 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1823 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1824 return;
1826 /* See if some hard register that dies in this insn and is not used in
1827 the output is the right class. Only works if the register we pick
1828 up can fully hold our output reload. */
1829 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1830 if (REG_NOTE_KIND (note) == REG_DEAD
1831 && REG_P (XEXP (note, 0))
1832 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1833 rld[output_reload].out)
1834 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1835 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1836 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].class],
1837 regno)
1838 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1839 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1840 /* Ensure that a secondary or tertiary reload for this output
1841 won't want this register. */
1842 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1843 || (!(TEST_HARD_REG_BIT
1844 (reg_class_contents[(int) rld[secondary_out].class], regno))
1845 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1846 || !(TEST_HARD_REG_BIT
1847 (reg_class_contents[(int) rld[secondary_out].class],
1848 regno)))))
1849 && !fixed_regs[regno]
1850 /* Check that a former pseudo is valid; see find_dummy_reload. */
1851 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1852 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1853 ORIGINAL_REGNO (XEXP (note, 0)))
1854 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1856 rld[output_reload].reg_rtx
1857 = gen_rtx_REG (rld[output_reload].outmode, regno);
1858 return;
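/* Illustrative note, not from the original sources: given an insn such as
   (set (reg A) (plus (reg B) (const_int 4))) that needed an input reload
   for B and a separate output reload for A, a successful combine above
   leaves a single reload I with rld[I].in = B, rld[I].out = A and
   rld[I].when_needed = RELOAD_OTHER, while the old output reload has its
   .out cleared; one reload register then serves both directions.  */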
1862 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1863 See if one of IN and OUT is a register that may be used;
1864 this is desirable since a spill-register won't be needed.
1865 If so, return the register rtx that proves acceptable.
1867 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1868 CLASS is the register class required for the reload.
1870 If FOR_REAL is >= 0, it is the number of the reload,
1871 and in some cases when it can be discovered that OUT doesn't need
1872 to be computed, clear out rld[FOR_REAL].out.
1874 If FOR_REAL is -1, this should not be done, because this call
1875 is just to see if a register can be found, not to find and install it.
1877 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1878 puts an additional constraint on being able to use IN for OUT since
1879 IN must not appear elsewhere in the insn (it is assumed that IN itself
1880 is safe from the earlyclobber). */
1882 static rtx
1883 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1884 enum machine_mode inmode, enum machine_mode outmode,
1885 enum reg_class class, int for_real, int earlyclobber)
1887 rtx in = real_in;
1888 rtx out = real_out;
1889 int in_offset = 0;
1890 int out_offset = 0;
1891 rtx value = 0;
1893 /* If operands exceed a word, we can't use either of them
1894 unless they have the same size. */
1895 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1896 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1897 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1898 return 0;
1900 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1901 respectively refers to a hard register. */
1903 /* Find the inside of any subregs. */
1904 while (GET_CODE (out) == SUBREG)
1906 if (REG_P (SUBREG_REG (out))
1907 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1908 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1909 GET_MODE (SUBREG_REG (out)),
1910 SUBREG_BYTE (out),
1911 GET_MODE (out));
1912 out = SUBREG_REG (out);
1914 while (GET_CODE (in) == SUBREG)
1916 if (REG_P (SUBREG_REG (in))
1917 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1918 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1919 GET_MODE (SUBREG_REG (in)),
1920 SUBREG_BYTE (in),
1921 GET_MODE (in));
1922 in = SUBREG_REG (in);
1925 /* Narrow down the reg class, the same way push_reload will;
1926 otherwise we might find a dummy now, but push_reload won't. */
1928 enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, class);
1929 if (preferred_class != NO_REGS)
1930 class = preferred_class;
1933 /* See if OUT will do. */
1934 if (REG_P (out)
1935 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1937 unsigned int regno = REGNO (out) + out_offset;
1938 unsigned int nwords = hard_regno_nregs[regno][outmode];
1939 rtx saved_rtx;
1941 /* When we consider whether the insn uses OUT,
1942 ignore references within IN. They don't prevent us
1943 from copying IN into OUT, because those refs would
1944 move into the insn that reloads IN.
1946 However, we only ignore IN in its role as this reload.
1947 If the insn uses IN elsewhere and it contains OUT,
1948 that counts. We can't be sure it's the "same" operand
1949 so it might not go through this reload. */
1950 saved_rtx = *inloc;
1951 *inloc = const0_rtx;
1953 if (regno < FIRST_PSEUDO_REGISTER
1954 && HARD_REGNO_MODE_OK (regno, outmode)
1955 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1956 PATTERN (this_insn), outloc))
1958 unsigned int i;
1960 for (i = 0; i < nwords; i++)
1961 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1962 regno + i))
1963 break;
1965 if (i == nwords)
1967 if (REG_P (real_out))
1968 value = real_out;
1969 else
1970 value = gen_rtx_REG (outmode, regno);
1974 *inloc = saved_rtx;
1977 /* Consider using IN if OUT was not acceptable
1978 or if OUT dies in this insn (like the quotient in a divmod insn).
1979 We can't use IN unless it dies in this insn,
1980 which means we must know accurately which hard regs are live.
1981 Also, the result can't go in IN if IN is used within OUT,
1982 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
1983 if (hard_regs_live_known
1984 && REG_P (in)
1985 && REGNO (in) < FIRST_PSEUDO_REGISTER
1986 && (value == 0
1987 || find_reg_note (this_insn, REG_UNUSED, real_out))
1988 && find_reg_note (this_insn, REG_DEAD, real_in)
1989 && !fixed_regs[REGNO (in)]
1990 && HARD_REGNO_MODE_OK (REGNO (in),
1991 /* The only case where out and real_out might
1992 have different modes is where real_out
1993 is a subreg, and in that case, out
1994 has a real mode. */
1995 (GET_MODE (out) != VOIDmode
1996 ? GET_MODE (out) : outmode))
1997 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
1998 /* However only do this if we can be sure that this input
1999 operand doesn't correspond with an uninitialized pseudo.
2000 global can assign some hardreg to it that is the same as
2001 the one assigned to a different, also live pseudo (as it
2002 can ignore the conflict). We must never introduce writes
2003 to such hardregs, as they would clobber the other live
2004 pseudo. See PR 20973. */
2005 || (!bitmap_bit_p (DF_LIVE_OUT (ENTRY_BLOCK_PTR),
2006 ORIGINAL_REGNO (in))
2007 /* Similarly, only do this if we can be sure that the death
2008 note is still valid. global can assign some hardreg to
2009 the pseudo referenced in the note and simultaneously a
2010 subword of this hardreg to a different, also live pseudo,
2011 because only another subword of the hardreg is actually
2012 used in the insn. This cannot happen if the pseudo has
2013 been assigned exactly one hardreg. See PR 33732. */
2014 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2016 unsigned int regno = REGNO (in) + in_offset;
2017 unsigned int nwords = hard_regno_nregs[regno][inmode];
2019 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2020 && ! hard_reg_set_here_p (regno, regno + nwords,
2021 PATTERN (this_insn))
2022 && (! earlyclobber
2023 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2024 PATTERN (this_insn), inloc)))
2026 unsigned int i;
2028 for (i = 0; i < nwords; i++)
2029 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2030 regno + i))
2031 break;
2033 if (i == nwords)
2035 /* If we were going to use OUT as the reload reg
2036 and changed our mind, it means OUT is a dummy that
2037 dies here. So don't bother copying value to it. */
2038 if (for_real >= 0 && value == real_out)
2039 rld[for_real].out = 0;
2040 if (REG_P (real_in))
2041 value = real_in;
2042 else
2043 value = gen_rtx_REG (inmode, regno);
2048 return value;
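/* Illustrative note, not from the original sources: push_reload invokes
   this as

     rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
                                         inmode, outmode,
                                         rld[i].class, i,
                                         earlyclobber_operand_p (out));

   If OUT is a suitable hard register not otherwise referenced by the insn,
   OUT itself comes back; failing that, if IN is a hard register that dies
   here and is not set elsewhere in the insn, IN comes back; otherwise the
   result is 0 and a separate reload register must be chosen later.  */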
2051 /* This page contains subroutines used mainly for determining
2052 whether the IN or an OUT of a reload can serve as the
2053 reload register. */
2055 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2057 int
2058 earlyclobber_operand_p (rtx x)
2060 int i;
2062 for (i = 0; i < n_earlyclobbers; i++)
2063 if (reload_earlyclobbers[i] == x)
2064 return 1;
2066 return 0;
2069 /* Return 1 if expression X alters a hard reg in the range
2070 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2071 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2072 X should be the body of an instruction. */
2074 static int
2075 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2077 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2079 rtx op0 = SET_DEST (x);
2081 while (GET_CODE (op0) == SUBREG)
2082 op0 = SUBREG_REG (op0);
2083 if (REG_P (op0))
2085 unsigned int r = REGNO (op0);
2087 /* See if this reg overlaps range under consideration. */
2088 if (r < end_regno
2089 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2090 return 1;
2093 else if (GET_CODE (x) == PARALLEL)
2095 int i = XVECLEN (x, 0) - 1;
2097 for (; i >= 0; i--)
2098 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2099 return 1;
2102 return 0;
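/* Illustrative note, not from the original sources: with BEG_REGNO = 4 and
   END_REGNO = 6, a body containing (set (reg:DI 5) ...) on a target where
   DImode needs two hard registers alters registers 5 and 6; since
   5 < END_REGNO and end_hard_regno (DImode, 5) == 7 > BEG_REGNO, the
   overlap test above returns 1.  */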
2105 /* Return 1 if ADDR is a valid memory address for mode MODE,
2106 and check that each pseudo reg has the proper kind of
2107 hard reg. */
2109 int
2110 strict_memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
2112 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2113 return 0;
2115 win:
2116 return 1;
2119 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2120 if they are the same hard reg, and has special hacks for
2121 autoincrement and autodecrement.
2122 This is specifically intended for find_reloads to use
2123 in determining whether two operands match.
2124 X is the operand whose number is the lower of the two.
2126 The value is 2 if Y contains a pre-increment that matches
2127 a non-incrementing address in X. */
2129 /* ??? To be completely correct, we should arrange to pass
2130 for X the output operand and for Y the input operand.
2131 For now, we assume that the output operand has the lower number
2132 because that is natural in (SET output (... input ...)). */
2134 int
2135 operands_match_p (rtx x, rtx y)
2137 int i;
2138 RTX_CODE code = GET_CODE (x);
2139 const char *fmt;
2140 int success_2;
2142 if (x == y)
2143 return 1;
2144 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2145 && (REG_P (y) || (GET_CODE (y) == SUBREG
2146 && REG_P (SUBREG_REG (y)))))
2148 int j;
2150 if (code == SUBREG)
2152 i = REGNO (SUBREG_REG (x));
2153 if (i >= FIRST_PSEUDO_REGISTER)
2154 goto slow;
2155 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2156 GET_MODE (SUBREG_REG (x)),
2157 SUBREG_BYTE (x),
2158 GET_MODE (x));
2160 else
2161 i = REGNO (x);
2163 if (GET_CODE (y) == SUBREG)
2165 j = REGNO (SUBREG_REG (y));
2166 if (j >= FIRST_PSEUDO_REGISTER)
2167 goto slow;
2168 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2169 GET_MODE (SUBREG_REG (y)),
2170 SUBREG_BYTE (y),
2171 GET_MODE (y));
2173 else
2174 j = REGNO (y);
2176 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2177 multiple hard register group of scalar integer registers, so that
2178 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2179 register. */
2180 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2181 && SCALAR_INT_MODE_P (GET_MODE (x))
2182 && i < FIRST_PSEUDO_REGISTER)
2183 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2184 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2185 && SCALAR_INT_MODE_P (GET_MODE (y))
2186 && j < FIRST_PSEUDO_REGISTER)
2187 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2189 return i == j;
2191 /* If two operands must match, because they are really a single
2192 operand of an assembler insn, then two postincrements are invalid
2193 because the assembler insn would increment only once.
2194 On the other hand, a postincrement matches ordinary indexing
2195 if the postincrement is the output operand. */
2196 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2197 return operands_match_p (XEXP (x, 0), y);
2198 /* Two preincrements are invalid
2199 because the assembler insn would increment only once.
2200 On the other hand, a preincrement matches ordinary indexing
2201 if the preincrement is the input operand.
2202 In this case, return 2, since some callers need to do special
2203 things when this happens. */
2204 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2205 || GET_CODE (y) == PRE_MODIFY)
2206 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2208 slow:
2210 /* Now we have disposed of all the cases in which different rtx codes
2211 can match. */
2212 if (code != GET_CODE (y))
2213 return 0;
2215 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2216 if (GET_MODE (x) != GET_MODE (y))
2217 return 0;
2219 switch (code)
2221 case CONST_INT:
2222 case CONST_DOUBLE:
2223 case CONST_FIXED:
2224 return 0;
2226 case LABEL_REF:
2227 return XEXP (x, 0) == XEXP (y, 0);
2228 case SYMBOL_REF:
2229 return XSTR (x, 0) == XSTR (y, 0);
2231 default:
2232 break;
2235 /* Compare the elements. If any pair of corresponding elements
2236 fail to match, return 0 for the whole thing. */
2238 success_2 = 0;
2239 fmt = GET_RTX_FORMAT (code);
2240 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2242 int val, j;
2243 switch (fmt[i])
2245 case 'w':
2246 if (XWINT (x, i) != XWINT (y, i))
2247 return 0;
2248 break;
2250 case 'i':
2251 if (XINT (x, i) != XINT (y, i))
2252 return 0;
2253 break;
2255 case 'e':
2256 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2257 if (val == 0)
2258 return 0;
2259 /* If any subexpression returns 2,
2260 we should return 2 if we are successful. */
2261 if (val == 2)
2262 success_2 = 1;
2263 break;
2265 case '0':
2266 break;
2268 case 'E':
2269 if (XVECLEN (x, i) != XVECLEN (y, i))
2270 return 0;
2271 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2273 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2274 if (val == 0)
2275 return 0;
2276 if (val == 2)
2277 success_2 = 1;
2279 break;
2281 /* It is believed that rtx's at this level will never
2282 contain anything but integers and other rtx's,
2283 except for within LABEL_REFs and SYMBOL_REFs. */
2284 default:
2285 gcc_unreachable ();
2288 return 1 + success_2;
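/* Illustrative note, not from the original sources, assuming a little-endian
   target where hard register 3 can hold both SImode and DImode:

     operands_match_p ((reg:SI 3), (subreg:SI (reg:DI 3) 0))     returns 1
     operands_match_p ((mem:SI (reg:SI 5)),
                       (mem:SI (pre_inc:SI (reg:SI 5))))         returns 2
     operands_match_p ((reg:SI 3), (reg:SI 4))                   returns 0

   The value 2 in the second case tells the caller that the match depends
   on a pre-increment in the second operand.  */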
2291 /* Describe the range of registers or memory referenced by X.
2292 If X is a register, set REG_FLAG and put the first register
2293 number into START and the last plus one into END.
2294 If X is a memory reference, put a base address into BASE
2295 and a range of integer offsets into START and END.
2296 If X is pushing on the stack, we can assume it causes no trouble,
2297 so we set the SAFE field. */
2299 static struct decomposition
2300 decompose (rtx x)
2302 struct decomposition val;
2303 int all_const = 0;
2305 memset (&val, 0, sizeof (val));
2307 switch (GET_CODE (x))
2309 case MEM:
2311 rtx base = NULL_RTX, offset = 0;
2312 rtx addr = XEXP (x, 0);
2314 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2315 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2317 val.base = XEXP (addr, 0);
2318 val.start = -GET_MODE_SIZE (GET_MODE (x));
2319 val.end = GET_MODE_SIZE (GET_MODE (x));
2320 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2321 return val;
2324 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2326 if (GET_CODE (XEXP (addr, 1)) == PLUS
2327 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2328 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2330 val.base = XEXP (addr, 0);
2331 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2332 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2333 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2334 return val;
2338 if (GET_CODE (addr) == CONST)
2340 addr = XEXP (addr, 0);
2341 all_const = 1;
2343 if (GET_CODE (addr) == PLUS)
2345 if (CONSTANT_P (XEXP (addr, 0)))
2347 base = XEXP (addr, 1);
2348 offset = XEXP (addr, 0);
2350 else if (CONSTANT_P (XEXP (addr, 1)))
2352 base = XEXP (addr, 0);
2353 offset = XEXP (addr, 1);
2357 if (offset == 0)
2359 base = addr;
2360 offset = const0_rtx;
2362 if (GET_CODE (offset) == CONST)
2363 offset = XEXP (offset, 0);
2364 if (GET_CODE (offset) == PLUS)
2366 if (GET_CODE (XEXP (offset, 0)) == CONST_INT)
2368 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2369 offset = XEXP (offset, 0);
2371 else if (GET_CODE (XEXP (offset, 1)) == CONST_INT)
2373 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2374 offset = XEXP (offset, 1);
2376 else
2378 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2379 offset = const0_rtx;
2382 else if (GET_CODE (offset) != CONST_INT)
2384 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2385 offset = const0_rtx;
2388 if (all_const && GET_CODE (base) == PLUS)
2389 base = gen_rtx_CONST (GET_MODE (base), base);
2391 gcc_assert (GET_CODE (offset) == CONST_INT);
2393 val.start = INTVAL (offset);
2394 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2395 val.base = base;
2397 break;
2399 case REG:
2400 val.reg_flag = 1;
2401 val.start = true_regnum (x);
2402 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2404 /* A pseudo with no hard reg. */
2405 val.start = REGNO (x);
2406 val.end = val.start + 1;
2408 else
2409 /* A hard reg. */
2410 val.end = end_hard_regno (GET_MODE (x), val.start);
2411 break;
2413 case SUBREG:
2414 if (!REG_P (SUBREG_REG (x)))
2415 /* This could be more precise, but it's good enough. */
2416 return decompose (SUBREG_REG (x));
2417 val.reg_flag = 1;
2418 val.start = true_regnum (x);
2419 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2420 return decompose (SUBREG_REG (x));
2421 else
2422 /* A hard reg. */
2423 val.end = val.start + subreg_nregs (x);
2424 break;
2426 case SCRATCH:
2427 /* This hasn't been assigned yet, so it can't conflict yet. */
2428 val.safe = 1;
2429 break;
2431 default:
2432 gcc_assert (CONSTANT_P (x));
2433 val.safe = 1;
2434 break;
2436 return val;
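/* Illustrative note, not from the original sources, assuming 4-byte SImode:

     decompose ((mem:SI (plus (reg fp) (const_int 8))))
       => base = (reg fp), start = 8, end = 12, reg_flag = 0, safe = 0
     decompose ((reg:SI P))    -- P is a pseudo that got no hard register
       => reg_flag = 1, start = P, end = P + 1
     decompose ((mem:SI (pre_dec (reg sp))))
       => base = (reg sp), start = -4, end = 4, safe = 1  */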
2439 /* Return 1 if altering Y will not modify the value of X.
2440 Y is also described by YDATA, which should be decompose (Y). */
2442 static int
2443 immune_p (rtx x, rtx y, struct decomposition ydata)
2445 struct decomposition xdata;
2447 if (ydata.reg_flag)
2448 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2449 if (ydata.safe)
2450 return 1;
2452 gcc_assert (MEM_P (y));
2453 /* If Y is memory and X is not, Y can't affect X. */
2454 if (!MEM_P (x))
2455 return 1;
2457 xdata = decompose (x);
2459 if (! rtx_equal_p (xdata.base, ydata.base))
2461 /* If bases are distinct symbolic constants, there is no overlap. */
2462 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2463 return 1;
2464 /* Constants and stack slots never overlap. */
2465 if (CONSTANT_P (xdata.base)
2466 && (ydata.base == frame_pointer_rtx
2467 || ydata.base == hard_frame_pointer_rtx
2468 || ydata.base == stack_pointer_rtx))
2469 return 1;
2470 if (CONSTANT_P (ydata.base)
2471 && (xdata.base == frame_pointer_rtx
2472 || xdata.base == hard_frame_pointer_rtx
2473 || xdata.base == stack_pointer_rtx))
2474 return 1;
2475 /* If either base is variable, we don't know anything. */
2476 return 0;
2479 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
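/* Illustrative note, not from the original sources: for two SImode stack
   slots at (plus (reg fp) (const_int 8)) and (plus (reg fp) (const_int 12)),
   decompose yields the byte ranges [8,12) and [12,16) with equal bases, so
   the final test above reports them immune from each other; had the second
   slot started at offset 10, the ranges would overlap and immune_p would
   return 0.  */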
2482 /* Similar, but calls decompose. */
2484 int
2485 safe_from_earlyclobber (rtx op, rtx clobber)
2487 struct decomposition early_data;
2489 early_data = decompose (clobber);
2490 return immune_p (op, clobber, early_data);
2493 /* Main entry point of this file: search the body of INSN
2494 for values that need reloading and record them with push_reload.
2495 REPLACE nonzero means record also where the values occur
2496 so that subst_reloads can be used.
2498 IND_LEVELS says how many levels of indirection are supported by this
2499 machine; a value of zero means that a memory reference is not a valid
2500 memory address.
2502 LIVE_KNOWN says we have valid information about which hard
2503 regs are live at each point in the program; this is true when
2504 we are called from global_alloc but false when stupid register
2505 allocation has been done.
2507 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2508 an element is nonnegative if that reg has been commandeered for reloading into.
2509 It is copied into STATIC_RELOAD_REG_P and referenced from there
2510 by various subroutines.
2512 Return TRUE if some operands need to be changed, because of swapping
2513 commutative operands, reg_equiv_address substitution, or whatever. */
2515 int
2516 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2517 short *reload_reg_p)
2519 int insn_code_number;
2520 int i, j;
2521 int noperands;
2522 /* These start out as the constraints for the insn
2523 and they are chewed up as we consider alternatives. */
2524 char *constraints[MAX_RECOG_OPERANDS];
2525 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2526 a register. */
2527 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2528 char pref_or_nothing[MAX_RECOG_OPERANDS];
2529 /* Nonzero for a MEM operand whose entire address needs a reload.
2530 May be -1 to indicate the entire address may or may not need a reload. */
2531 int address_reloaded[MAX_RECOG_OPERANDS];
2532 /* Nonzero for an address operand that needs to be completely reloaded.
2533 May be -1 to indicate the entire operand may or may not need a reload. */
2534 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2535 /* Value of enum reload_type to use for operand. */
2536 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2537 /* Value of enum reload_type to use within address of operand. */
2538 enum reload_type address_type[MAX_RECOG_OPERANDS];
2539 /* Save the usage of each operand. */
2540 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2541 int no_input_reloads = 0, no_output_reloads = 0;
2542 int n_alternatives;
2543 int this_alternative[MAX_RECOG_OPERANDS];
2544 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2545 char this_alternative_win[MAX_RECOG_OPERANDS];
2546 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2547 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2548 int this_alternative_matches[MAX_RECOG_OPERANDS];
2549 int swapped;
2550 int goal_alternative[MAX_RECOG_OPERANDS];
2551 int this_alternative_number;
2552 int goal_alternative_number = 0;
2553 int operand_reloadnum[MAX_RECOG_OPERANDS];
2554 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2555 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2556 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2557 char goal_alternative_win[MAX_RECOG_OPERANDS];
2558 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2559 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2560 int goal_alternative_swapped;
2561 int best;
2562 int commutative;
2563 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2564 rtx substed_operand[MAX_RECOG_OPERANDS];
2565 rtx body = PATTERN (insn);
2566 rtx set = single_set (insn);
2567 int goal_earlyclobber = 0, this_earlyclobber;
2568 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2569 int retval = 0;
2571 this_insn = insn;
2572 n_reloads = 0;
2573 n_replacements = 0;
2574 n_earlyclobbers = 0;
2575 replace_reloads = replace;
2576 hard_regs_live_known = live_known;
2577 static_reload_reg_p = reload_reg_p;
2579 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2580 neither are insns that SET cc0. Insns that use CC0 are not allowed
2581 to have any input reloads. */
2582 if (JUMP_P (insn) || CALL_P (insn))
2583 no_output_reloads = 1;
2585 #ifdef HAVE_cc0
2586 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2587 no_input_reloads = 1;
2588 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2589 no_output_reloads = 1;
2590 #endif
2592 #ifdef SECONDARY_MEMORY_NEEDED
2593 /* The eliminated forms of any secondary memory locations are per-insn, so
2594 clear them out here. */
2596 if (secondary_memlocs_elim_used)
2598 memset (secondary_memlocs_elim, 0,
2599 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2600 secondary_memlocs_elim_used = 0;
2602 #endif
2604 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2605 is cheap to move between them. If it is not, there may not be an insn
2606 to do the copy, so we may need a reload. */
2607 if (GET_CODE (body) == SET
2608 && REG_P (SET_DEST (body))
2609 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2610 && REG_P (SET_SRC (body))
2611 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2612 && REGISTER_MOVE_COST (GET_MODE (SET_SRC (body)),
2613 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2614 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2615 return 0;
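  /* Illustrative note, not from the original sources: the quick return just
     above fires for a plain hard-register copy such as
     (set (reg:SI 1) (reg:SI 0)) whenever REGISTER_MOVE_COST for the two
     register classes is 2, i.e. when the target can do the copy directly
     and no reload work can be needed.  */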
2617 extract_insn (insn);
2619 noperands = reload_n_operands = recog_data.n_operands;
2620 n_alternatives = recog_data.n_alternatives;
2622 /* Just return "no reloads" if insn has no operands with constraints. */
2623 if (noperands == 0 || n_alternatives == 0)
2624 return 0;
2626 insn_code_number = INSN_CODE (insn);
2627 this_insn_is_asm = insn_code_number < 0;
2629 memcpy (operand_mode, recog_data.operand_mode,
2630 noperands * sizeof (enum machine_mode));
2631 memcpy (constraints, recog_data.constraints, noperands * sizeof (char *));
2633 commutative = -1;
2635 /* If we will need to know, later, whether some pair of operands
2636 are the same, we must compare them now and save the result.
2637 Reloading the base and index registers will clobber them
2638 and afterward they will fail to match. */
2640 for (i = 0; i < noperands; i++)
2642 char *p;
2643 int c;
2645 substed_operand[i] = recog_data.operand[i];
2646 p = constraints[i];
2648 modified[i] = RELOAD_READ;
2650 /* Scan this operand's constraint to see if it is an output operand,
2651 an in-out operand, is commutative, or should match another. */
2653 while ((c = *p))
2655 p += CONSTRAINT_LEN (c, p);
2656 switch (c)
2658 case '=':
2659 modified[i] = RELOAD_WRITE;
2660 break;
2661 case '+':
2662 modified[i] = RELOAD_READ_WRITE;
2663 break;
2664 case '%':
2666 /* The last operand should not be marked commutative. */
2667 gcc_assert (i != noperands - 1);
2669 /* We currently only support one commutative pair of
2670 operands. Some existing asm code currently uses more
2671 than one pair. Previously, that would usually work,
2672 but sometimes it would crash the compiler. We
2673 continue supporting that case as well as we can by
2674 silently ignoring all but the first pair. In the
2675 future we may handle it correctly. */
2676 if (commutative < 0)
2677 commutative = i;
2678 else
2679 gcc_assert (this_insn_is_asm);
2681 break;
2682 /* Use of ISDIGIT is tempting here, but it may get expensive because
2683 of locale support we don't want. */
2684 case '0': case '1': case '2': case '3': case '4':
2685 case '5': case '6': case '7': case '8': case '9':
2687 c = strtoul (p - 1, &p, 10);
2689 operands_match[c][i]
2690 = operands_match_p (recog_data.operand[c],
2691 recog_data.operand[i]);
2693 /* An operand may not match itself. */
2694 gcc_assert (c != i);
2696 /* If C can be commuted with C+1, and C might need to match I,
2697 then C+1 might also need to match I. */
2698 if (commutative >= 0)
2700 if (c == commutative || c == commutative + 1)
2702 int other = c + (c == commutative ? 1 : -1);
2703 operands_match[other][i]
2704 = operands_match_p (recog_data.operand[other],
2705 recog_data.operand[i]);
2707 if (i == commutative || i == commutative + 1)
2709 int other = i + (i == commutative ? 1 : -1);
2710 operands_match[c][other]
2711 = operands_match_p (recog_data.operand[c],
2712 recog_data.operand[other]);
2714 /* Note that C is supposed to be less than I.
2715 No need to consider altering both C and I because in
2716 that case we would alter one into the other. */
2723 /* Examine each operand that is a memory reference or memory address
2724 and reload parts of the addresses into index registers.
2725 Also here any references to pseudo regs that didn't get hard regs
2726 but are equivalent to constants get replaced in the insn itself
2727 with those constants. Nobody will ever see them again.
2729 Finally, set up the preferred classes of each operand. */
2731 for (i = 0; i < noperands; i++)
2733 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2735 address_reloaded[i] = 0;
2736 address_operand_reloaded[i] = 0;
2737 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2738 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2739 : RELOAD_OTHER);
2740 address_type[i]
2741 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2742 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2743 : RELOAD_OTHER);
2745 if (*constraints[i] == 0)
2746 /* Ignore things like match_operator operands. */
2748 else if (constraints[i][0] == 'p'
2749 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2751 address_operand_reloaded[i]
2752 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2753 recog_data.operand[i],
2754 recog_data.operand_loc[i],
2755 i, operand_type[i], ind_levels, insn);
2757 /* If we now have a simple operand where we used to have a
2758 PLUS or MULT, re-recognize and try again. */
2759 if ((OBJECT_P (*recog_data.operand_loc[i])
2760 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2761 && (GET_CODE (recog_data.operand[i]) == MULT
2762 || GET_CODE (recog_data.operand[i]) == PLUS))
2764 INSN_CODE (insn) = -1;
2765 retval = find_reloads (insn, replace, ind_levels, live_known,
2766 reload_reg_p);
2767 return retval;
2770 recog_data.operand[i] = *recog_data.operand_loc[i];
2771 substed_operand[i] = recog_data.operand[i];
2773 /* Address operands are reloaded in their existing mode,
2774 no matter what is specified in the machine description. */
2775 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2777 else if (code == MEM)
2779 address_reloaded[i]
2780 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2781 recog_data.operand_loc[i],
2782 XEXP (recog_data.operand[i], 0),
2783 &XEXP (recog_data.operand[i], 0),
2784 i, address_type[i], ind_levels, insn);
2785 recog_data.operand[i] = *recog_data.operand_loc[i];
2786 substed_operand[i] = recog_data.operand[i];
2788 else if (code == SUBREG)
2790 rtx reg = SUBREG_REG (recog_data.operand[i]);
2791 rtx op
2792 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2793 ind_levels,
2794 set != 0
2795 && &SET_DEST (set) == recog_data.operand_loc[i],
2796 insn,
2797 &address_reloaded[i]);
2799 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2800 that didn't get a hard register, emit a USE with a REG_EQUAL
2801 note in front so that we might inherit a previous, possibly
2802 wider reload. */
2804 if (replace
2805 && MEM_P (op)
2806 && REG_P (reg)
2807 && (GET_MODE_SIZE (GET_MODE (reg))
2808 >= GET_MODE_SIZE (GET_MODE (op)))
2809 && reg_equiv_constant[REGNO (reg)] == 0)
2810 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2811 insn),
2812 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2814 substed_operand[i] = recog_data.operand[i] = op;
2816 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2817 /* We can get a PLUS as an "operand" as a result of register
2818 elimination. See eliminate_regs and gen_reload. We handle
2819 a unary operator by reloading the operand. */
2820 substed_operand[i] = recog_data.operand[i]
2821 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2822 ind_levels, 0, insn,
2823 &address_reloaded[i]);
2824 else if (code == REG)
2826 /* This is equivalent to calling find_reloads_toplev.
2827 The code is duplicated for speed.
2828 When we find a pseudo always equivalent to a constant,
2829 we replace it by the constant. We must be sure, however,
2830 that we don't try to replace it in the insn in which it
2831 is being set. */
2832 int regno = REGNO (recog_data.operand[i]);
2833 if (reg_equiv_constant[regno] != 0
2834 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2836 /* Record the existing mode so that the check if constants are
2837 allowed will work when operand_mode isn't specified. */
2839 if (operand_mode[i] == VOIDmode)
2840 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2842 substed_operand[i] = recog_data.operand[i]
2843 = reg_equiv_constant[regno];
2845 if (reg_equiv_memory_loc[regno] != 0
2846 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2847 /* We need not give a valid is_set_dest argument since the case
2848 of a constant equivalence was checked above. */
2849 substed_operand[i] = recog_data.operand[i]
2850 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2851 ind_levels, 0, insn,
2852 &address_reloaded[i]);
2854 /* If the operand is still a register (we didn't replace it with an
2855 equivalent), get the preferred class to reload it into. */
2856 code = GET_CODE (recog_data.operand[i]);
2857 preferred_class[i]
2858 = ((code == REG && REGNO (recog_data.operand[i])
2859 >= FIRST_PSEUDO_REGISTER)
2860 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2861 : NO_REGS);
2862 pref_or_nothing[i]
2863 = (code == REG
2864 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2865 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2868 /* If this is simply a copy from operand 1 to operand 0, merge the
2869 preferred classes for the operands. */
2870 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2871 && recog_data.operand[1] == SET_SRC (set))
2873 preferred_class[0] = preferred_class[1]
2874 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2875 pref_or_nothing[0] |= pref_or_nothing[1];
2876 pref_or_nothing[1] |= pref_or_nothing[0];
2879 /* Now see what we need for pseudo-regs that didn't get hard regs
2880 or got the wrong kind of hard reg. For this, we must consider
2881 all the operands together against the register constraints. */
2883 best = MAX_RECOG_OPERANDS * 2 + 600;
2885 swapped = 0;
2886 goal_alternative_swapped = 0;
2887 try_swapped:
2889 /* The constraints are made of several alternatives.
2890 Each operand's constraint looks like foo,bar,... with commas
2891 separating the alternatives. The first alternatives for all
2892 operands go together, the second alternatives go together, etc.
2894 First loop over alternatives. */
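  /* Illustrative note, not from the original sources: for a two-operand
     insn whose constraint strings are "=r,m" for operand 0 and "rI,r" for
     operand 1, alternative 0 pairs "=r" with "rI" and alternative 1 pairs
     "m" with "r"; the loop below scores each such column of constraints
     and remembers the cheapest one as the goal alternative.  */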
2896 for (this_alternative_number = 0;
2897 this_alternative_number < n_alternatives;
2898 this_alternative_number++)
2900 /* Loop over operands for one constraint alternative. */
2901 /* LOSERS counts those that don't fit this alternative
2902 and would require loading. */
2903 int losers = 0;
2904 /* BAD is set to 1 if some operand can't fit this alternative
2905 even after reloading. */
2906 int bad = 0;
2907 /* REJECT is a count of how undesirable this alternative says it is
2908 if any reloading is required. If the alternative matches exactly
2909 then REJECT is ignored, but otherwise it gets this much
2910 counted against it in addition to the reloading needed. Each
2911 ? counts three times here since we want the disparaging caused by
2912 a bad register class to only count 1/3 as much. */
2913 int reject = 0;
2915 this_earlyclobber = 0;
2917 for (i = 0; i < noperands; i++)
2919 char *p = constraints[i];
2920 char *end;
2921 int len;
2922 int win = 0;
2923 int did_match = 0;
2924 /* 0 => this operand can be reloaded somehow for this alternative. */
2925 int badop = 1;
2926 /* 0 => this operand can be reloaded if the alternative allows regs. */
2927 int winreg = 0;
2928 int c;
2929 int m;
2930 rtx operand = recog_data.operand[i];
2931 int offset = 0;
2932 /* Nonzero means this is a MEM that must be reloaded into a reg
2933 regardless of what the constraint says. */
2934 int force_reload = 0;
2935 int offmemok = 0;
2936 /* Nonzero if a constant forced into memory would be OK for this
2937 operand. */
2938 int constmemok = 0;
2939 int earlyclobber = 0;
2941 /* If the predicate accepts a unary operator, it means that
2942 we need to reload the operand, but do not do this for
2943 match_operator and friends. */
2944 if (UNARY_P (operand) && *p != 0)
2945 operand = XEXP (operand, 0);
2947 /* If the operand is a SUBREG, extract
2948 the REG or MEM (or maybe even a constant) within.
2949 (Constants can occur as a result of reg_equiv_constant.) */
2951 while (GET_CODE (operand) == SUBREG)
2953 /* Offset only matters when operand is a REG and
2954 it is a hard reg. This is because it is passed
2955 to reg_fits_class_p if it is a REG, and reg_fits_class_p
2956 returns 0 for all pseudos. */
2957 if (REG_P (SUBREG_REG (operand))
2958 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
2960 if (!subreg_offset_representable_p
2961 (REGNO (SUBREG_REG (operand)),
2962 GET_MODE (SUBREG_REG (operand)),
2963 SUBREG_BYTE (operand),
2964 GET_MODE (operand)))
2965 force_reload = 1;
2966 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
2967 GET_MODE (SUBREG_REG (operand)),
2968 SUBREG_BYTE (operand),
2969 GET_MODE (operand));
2971 operand = SUBREG_REG (operand);
2972 /* Force reload if this is a constant or PLUS or if there may
2973 be a problem accessing OPERAND in the outer mode. */
2974 if (CONSTANT_P (operand)
2975 || GET_CODE (operand) == PLUS
2976 /* We must force a reload of paradoxical SUBREGs
2977 of a MEM because the alignment of the inner value
2978 may not be enough to do the outer reference. On
2979 big-endian machines, it may also reference outside
2980 the object.
2982 On machines that extend byte operations and we have a
2983 SUBREG where both the inner and outer modes are no wider
2984 than a word and the inner mode is narrower, is integral,
2985 and gets extended when loaded from memory, combine.c has
2986 made assumptions about the behavior of the machine in such
2987 register access. If the data is, in fact, in memory we
2988 must always load using the size assumed to be in the
2989 register and let the insn do the different-sized
2990 accesses.
2992 This is doubly true if WORD_REGISTER_OPERATIONS. In
2993 this case eliminate_regs has left non-paradoxical
2994 subregs for push_reload to see. Make sure it does
2995 by forcing the reload.
2997 ??? When is it right at this stage to have a subreg
2998 of a mem that is _not_ to be handled specially? IMO
2999 those should have been reduced to just a mem. */
3000 || ((MEM_P (operand)
3001 || (REG_P (operand)
3002 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3003 #ifndef WORD_REGISTER_OPERATIONS
3004 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3005 < BIGGEST_ALIGNMENT)
3006 && (GET_MODE_SIZE (operand_mode[i])
3007 > GET_MODE_SIZE (GET_MODE (operand))))
3008 || BYTES_BIG_ENDIAN
3009 #ifdef LOAD_EXTEND_OP
3010 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3011 && (GET_MODE_SIZE (GET_MODE (operand))
3012 <= UNITS_PER_WORD)
3013 && (GET_MODE_SIZE (operand_mode[i])
3014 > GET_MODE_SIZE (GET_MODE (operand)))
3015 && INTEGRAL_MODE_P (GET_MODE (operand))
3016 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3017 #endif
3019 #endif
3022 force_reload = 1;
3025 this_alternative[i] = (int) NO_REGS;
3026 this_alternative_win[i] = 0;
3027 this_alternative_match_win[i] = 0;
3028 this_alternative_offmemok[i] = 0;
3029 this_alternative_earlyclobber[i] = 0;
3030 this_alternative_matches[i] = -1;
3032 /* An empty constraint or empty alternative
3033 allows anything which matched the pattern. */
3034 if (*p == 0 || *p == ',')
3035 win = 1, badop = 0;
3037 /* Scan this alternative's specs for this operand;
3038 set WIN if the operand fits any letter in this alternative.
3039 Otherwise, clear BADOP if this operand could
3040 fit some letter after reloads,
3041 or set WINREG if this operand could fit after reloads
3042 provided the constraint allows some registers. */
3045 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3047 case '\0':
3048 len = 0;
3049 break;
3050 case ',':
3051 c = '\0';
3052 break;
3054 case '=': case '+': case '*':
3055 break;
3057 case '%':
3058 /* We only support one commutative marker, the first
3059 one. We already set commutative above. */
3060 break;
3062 case '?':
3063 reject += 6;
3064 break;
3066 case '!':
3067 reject = 600;
3068 break;
3070 case '#':
3071 /* Ignore rest of this alternative as far as
3072 reloading is concerned. */
3073 do
3074 p++;
3075 while (*p && *p != ',');
3076 len = 0;
3077 break;
3079 case '0': case '1': case '2': case '3': case '4':
3080 case '5': case '6': case '7': case '8': case '9':
3081 m = strtoul (p, &end, 10);
3082 p = end;
3083 len = 0;
3085 this_alternative_matches[i] = m;
3086 /* We are supposed to match a previous operand.
3087 If we do, we win if that one did.
3088 If we do not, count both of the operands as losers.
3089 (This is too conservative, since most of the time
3090 only a single reload insn will be needed to make
3091 the two operands win. As a result, this alternative
3092 may be rejected when it is actually desirable.) */
3093 if ((swapped && (m != commutative || i != commutative + 1))
3094 /* If we are matching as if two operands were swapped,
3095 also pretend that operands_match had been computed
3096 with swapped.
3097 But if I is the second of those and C is the first,
3098 don't exchange them, because operands_match is valid
3099 only on one side of its diagonal. */
3100 ? (operands_match
3101 [(m == commutative || m == commutative + 1)
3102 ? 2 * commutative + 1 - m : m]
3103 [(i == commutative || i == commutative + 1)
3104 ? 2 * commutative + 1 - i : i])
3105 : operands_match[m][i])
3107 /* If we are matching a non-offsettable address where an
3108 offsettable address was expected, then we must reject
3109 this combination, because we can't reload it. */
3110 if (this_alternative_offmemok[m]
3111 && MEM_P (recog_data.operand[m])
3112 && this_alternative[m] == (int) NO_REGS
3113 && ! this_alternative_win[m])
3114 bad = 1;
3116 did_match = this_alternative_win[m];
3118 else
3120 /* Operands don't match. */
3121 rtx value;
3122 int loc1, loc2;
3123 /* Retroactively mark the operand we had to match
3124 as a loser, if it wasn't already. */
3125 if (this_alternative_win[m])
3126 losers++;
3127 this_alternative_win[m] = 0;
3128 if (this_alternative[m] == (int) NO_REGS)
3129 bad = 1;
3130 /* But count the pair only once in the total badness of
3131 this alternative, if the pair can be a dummy reload.
3132 The pointers in operand_loc are not swapped; swap
3133 them by hand if necessary. */
3134 if (swapped && i == commutative)
3135 loc1 = commutative + 1;
3136 else if (swapped && i == commutative + 1)
3137 loc1 = commutative;
3138 else
3139 loc1 = i;
3140 if (swapped && m == commutative)
3141 loc2 = commutative + 1;
3142 else if (swapped && m == commutative + 1)
3143 loc2 = commutative;
3144 else
3145 loc2 = m;
3146 value
3147 = find_dummy_reload (recog_data.operand[i],
3148 recog_data.operand[m],
3149 recog_data.operand_loc[loc1],
3150 recog_data.operand_loc[loc2],
3151 operand_mode[i], operand_mode[m],
3152 this_alternative[m], -1,
3153 this_alternative_earlyclobber[m]);
3155 if (value != 0)
3156 losers--;
3158 /* This can be fixed with reloads if the operand
3159 we are supposed to match can be fixed with reloads. */
3160 badop = 0;
3161 this_alternative[i] = this_alternative[m];
3163 /* If we have to reload this operand and some previous
3164 operand also had to match the same thing as this
3165 operand, we don't know how to do that. So reject this
3166 alternative. */
3167 if (! did_match || force_reload)
3168 for (j = 0; j < i; j++)
3169 if (this_alternative_matches[j]
3170 == this_alternative_matches[i])
3171 badop = 1;
3172 break;
3174 case 'p':
3175 /* All necessary reloads for an address_operand
3176 were handled in find_reloads_address. */
3177 this_alternative[i]
3178 = (int) base_reg_class (VOIDmode, ADDRESS, SCRATCH);
3179 win = 1;
3180 badop = 0;
3181 break;
3183 case 'm':
3184 if (force_reload)
3185 break;
3186 if (MEM_P (operand)
3187 || (REG_P (operand)
3188 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3189 && reg_renumber[REGNO (operand)] < 0))
3190 win = 1;
3191 if (CONST_POOL_OK_P (operand))
3192 badop = 0;
3193 constmemok = 1;
3194 break;
3196 case '<':
3197 if (MEM_P (operand)
3198 && ! address_reloaded[i]
3199 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3200 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3201 win = 1;
3202 break;
3204 case '>':
3205 if (MEM_P (operand)
3206 && ! address_reloaded[i]
3207 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3208 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3209 win = 1;
3210 break;
3212 /* Memory operand whose address is not offsettable. */
3213 case 'V':
3214 if (force_reload)
3215 break;
3216 if (MEM_P (operand)
3217 && ! (ind_levels ? offsettable_memref_p (operand)
3218 : offsettable_nonstrict_memref_p (operand))
3219 /* Certain mem addresses will become offsettable
3220 after they themselves are reloaded. This is important;
3221 we don't want our own handling of unoffsettables
3222 to override the handling of reg_equiv_address. */
3223 && !(REG_P (XEXP (operand, 0))
3224 && (ind_levels == 0
3225 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3226 win = 1;
3227 break;
3229 /* Memory operand whose address is offsettable. */
3230 case 'o':
3231 if (force_reload)
3232 break;
3233 if ((MEM_P (operand)
3234 /* If IND_LEVELS, find_reloads_address won't reload a
3235 pseudo that didn't get a hard reg, so we have to
3236 reject that case. */
3237 && ((ind_levels ? offsettable_memref_p (operand)
3238 : offsettable_nonstrict_memref_p (operand))
3239 /* A reloaded address is offsettable because it is now
3240 just a simple register indirect. */
3241 || address_reloaded[i] == 1))
3242 || (REG_P (operand)
3243 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3244 && reg_renumber[REGNO (operand)] < 0
3245 /* If reg_equiv_address is nonzero, we will be
3246 loading it into a register; hence it will be
3247 offsettable, but we cannot say that reg_equiv_mem
3248 is offsettable without checking. */
3249 && ((reg_equiv_mem[REGNO (operand)] != 0
3250 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3251 || (reg_equiv_address[REGNO (operand)] != 0))))
3252 win = 1;
3253 if (CONST_POOL_OK_P (operand)
3254 || MEM_P (operand))
3255 badop = 0;
3256 constmemok = 1;
3257 offmemok = 1;
3258 break;
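/* "Offsettable" means that adding a small constant to the address
   still yields a valid address; on a typical target (mem (reg fp)) is
   offsettable while (mem (post_inc (reg))) is not.  */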
3260 case '&':
3261 /* Output operand that is stored before the need for the
3262 input operands (and their index registers) is over. */
3263 earlyclobber = 1, this_earlyclobber = 1;
3264 break;
3266 case 'E':
3267 case 'F':
3268 if (GET_CODE (operand) == CONST_DOUBLE
3269 || (GET_CODE (operand) == CONST_VECTOR
3270 && (GET_MODE_CLASS (GET_MODE (operand))
3271 == MODE_VECTOR_FLOAT)))
3272 win = 1;
3273 break;
3275 case 'G':
3276 case 'H':
3277 if (GET_CODE (operand) == CONST_DOUBLE
3278 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3279 win = 1;
3280 break;
3282 case 's':
3283 if (GET_CODE (operand) == CONST_INT
3284 || (GET_CODE (operand) == CONST_DOUBLE
3285 && GET_MODE (operand) == VOIDmode))
3286 break;
3287 case 'i':
3288 if (CONSTANT_P (operand)
3289 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3290 win = 1;
3291 break;
3293 case 'n':
3294 if (GET_CODE (operand) == CONST_INT
3295 || (GET_CODE (operand) == CONST_DOUBLE
3296 && GET_MODE (operand) == VOIDmode))
3297 win = 1;
3298 break;
3300 case 'I':
3301 case 'J':
3302 case 'K':
3303 case 'L':
3304 case 'M':
3305 case 'N':
3306 case 'O':
3307 case 'P':
3308 if (GET_CODE (operand) == CONST_INT
3309 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3310 win = 1;
3311 break;
3313 case 'X':
3314 force_reload = 0;
3315 win = 1;
3316 break;
3318 case 'g':
3319 if (! force_reload
3320 /* A PLUS is never a valid operand, but reload can make
3321 it from a register when eliminating registers. */
3322 && GET_CODE (operand) != PLUS
3323 /* A SCRATCH is not a valid operand. */
3324 && GET_CODE (operand) != SCRATCH
3325 && (! CONSTANT_P (operand)
3326 || ! flag_pic
3327 || LEGITIMATE_PIC_OPERAND_P (operand))
3328 && (GENERAL_REGS == ALL_REGS
3329 || !REG_P (operand)
3330 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3331 && reg_renumber[REGNO (operand)] < 0)))
3332 win = 1;
3333 /* Drop through into 'r' case. */
3335 case 'r':
3336 this_alternative[i]
3337 = (int) reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3338 goto reg;
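/* For the 'g' and 'r' cases above, reg_class_subunion[X][Y] is the
   largest register class contained in the union of X and Y, so
   successive register constraints within one alternative accumulate
   into a single class for this operand.  */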
3340 default:
3341 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3343 #ifdef EXTRA_CONSTRAINT_STR
3344 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3346 if (force_reload)
3347 break;
3348 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3349 win = 1;
3350 /* If the address was already reloaded,
3351 we win as well. */
3352 else if (MEM_P (operand)
3353 && address_reloaded[i] == 1)
3354 win = 1;
3355 /* Likewise if the address will be reloaded because
3356 reg_equiv_address is nonzero. For reg_equiv_mem
3357 we have to check. */
3358 else if (REG_P (operand)
3359 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3360 && reg_renumber[REGNO (operand)] < 0
3361 && ((reg_equiv_mem[REGNO (operand)] != 0
3362 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3363 || (reg_equiv_address[REGNO (operand)] != 0)))
3364 win = 1;
3366 /* If we didn't already win, we can reload
3367 constants via force_const_mem, and other
3368 MEMs by reloading the address like for 'o'. */
3369 if (CONST_POOL_OK_P (operand)
3370 || MEM_P (operand))
3371 badop = 0;
3372 constmemok = 1;
3373 offmemok = 1;
3374 break;
3376 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3378 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3379 win = 1;
3381 /* If we didn't already win, we can reload
3382 the address into a base register. */
3383 this_alternative[i]
3384 = (int) base_reg_class (VOIDmode, ADDRESS, SCRATCH);
3385 badop = 0;
3386 break;
3389 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3390 win = 1;
3391 #endif
3392 break;
3395 this_alternative[i]
3396 = (int) (reg_class_subunion
3397 [this_alternative[i]]
3398 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3399 reg:
3400 if (GET_MODE (operand) == BLKmode)
3401 break;
3402 winreg = 1;
3403 if (REG_P (operand)
3404 && reg_fits_class_p (operand, this_alternative[i],
3405 offset, GET_MODE (recog_data.operand[i])))
3406 win = 1;
3407 break;
3409 while ((p += len), c);
3411 constraints[i] = p;
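/* At this point the constraint letters of this alternative have been
   scanned for operand I: WIN means the operand already satisfies the
   alternative, BADOP means it cannot be made to satisfy it even with
   reloads, WINREG means some register class was acceptable, and
   CONSTMEMOK / OFFMEMOK record whether a constant forced into memory,
   or a memory reference whose address is reloaded, would do.  */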
3413 /* If this operand could be handled with a reg,
3414 and some reg is allowed, then this operand can be handled. */
3415 if (winreg && this_alternative[i] != (int) NO_REGS)
3416 badop = 0;
3418 /* Record which operands fit this alternative. */
3419 this_alternative_earlyclobber[i] = earlyclobber;
3420 if (win && ! force_reload)
3421 this_alternative_win[i] = 1;
3422 else if (did_match && ! force_reload)
3423 this_alternative_match_win[i] = 1;
3424 else
3426 int const_to_mem = 0;
3428 this_alternative_offmemok[i] = offmemok;
3429 losers++;
3430 if (badop)
3431 bad = 1;
3432 /* Alternative loses if it has no regs for a reg operand. */
3433 if (REG_P (operand)
3434 && this_alternative[i] == (int) NO_REGS
3435 && this_alternative_matches[i] < 0)
3436 bad = 1;
3438 /* If this is a constant that is reloaded into the desired
3439 class by copying it to memory first, count that as another
3440 reload. This is consistent with other code and is
3441 required to avoid choosing another alternative when
3442 the constant is moved into memory by this function on
3443 an early reload pass. Note that the test here is
3444 precisely the same as in the code below that calls
3445 force_const_mem. */
3446 if (CONST_POOL_OK_P (operand)
3447 && ((PREFERRED_RELOAD_CLASS (operand,
3448 (enum reg_class) this_alternative[i])
3449 == NO_REGS)
3450 || no_input_reloads)
3451 && operand_mode[i] != VOIDmode)
3453 const_to_mem = 1;
3454 if (this_alternative[i] != (int) NO_REGS)
3455 losers++;
3458 /* Alternative loses if it requires a type of reload not
3459 permitted for this insn. We can always reload SCRATCH
3460 and objects with a REG_UNUSED note. */
3461 if (GET_CODE (operand) != SCRATCH
3462 && modified[i] != RELOAD_READ && no_output_reloads
3463 && ! find_reg_note (insn, REG_UNUSED, operand))
3464 bad = 1;
3465 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3466 && ! const_to_mem)
3467 bad = 1;
3469 /* If we can't reload this value at all, reject this
3470 alternative. Note that we could also lose due to
3471 LIMIT_RELOAD_CLASS, but we don't check that
3472 here. */
3474 if (! CONSTANT_P (operand)
3475 && (enum reg_class) this_alternative[i] != NO_REGS)
3477 if (PREFERRED_RELOAD_CLASS
3478 (operand, (enum reg_class) this_alternative[i])
3479 == NO_REGS)
3480 reject = 600;
3482 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
3483 if (operand_type[i] == RELOAD_FOR_OUTPUT
3484 && PREFERRED_OUTPUT_RELOAD_CLASS
3485 (operand, (enum reg_class) this_alternative[i])
3486 == NO_REGS)
3487 reject = 600;
3488 #endif
3491 /* We prefer to reload pseudos over reloading other things,
3492 since such reloads may be eliminated later.
3493 If we are reloading a SCRATCH, we won't be generating any
3494 insns, just using a register, so it is also preferred.
3495 So bump REJECT in other cases. Don't do this in the
3496 case where we are forcing a constant into memory and
3497 it will then win since we don't want to have a different
3498 alternative match then. */
3499 if (! (REG_P (operand)
3500 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3501 && GET_CODE (operand) != SCRATCH
3502 && ! (const_to_mem && constmemok))
3503 reject += 2;
3505 /* Input reloads can be inherited more often than output
3506 reloads can be removed, so penalize output reloads. */
3507 if (operand_type[i] != RELOAD_FOR_INPUT
3508 && GET_CODE (operand) != SCRATCH)
3509 reject++;
3512 /* If this operand is a pseudo register that didn't get a hard
3513 reg and this alternative accepts some register, see if the
3514 class that we want is a subset of the preferred class for this
3515 register. If not, but it intersects that class, use the
3516 preferred class instead. If it does not intersect the preferred
3517 class, show that usage of this alternative should be discouraged;
3518 it will be discouraged more still if the register is `preferred
3519 or nothing'. We do this because it increases the chance of
3520 reusing our spill register in a later insn and avoiding a pair
3521 of memory stores and loads.
3523 Don't bother with this if this alternative will accept this
3524 operand.
3526 Don't do this for a multiword operand, since it is only a
3527 small win and has the risk of requiring more spill registers,
3528 which could cause a large loss.
3530 Don't do this if the preferred class has only one register
3531 because we might otherwise exhaust the class. */
3533 if (! win && ! did_match
3534 && this_alternative[i] != (int) NO_REGS
3535 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3536 && reg_class_size [(int) preferred_class[i]] > 0
3537 && ! SMALL_REGISTER_CLASS_P (preferred_class[i]))
3539 if (! reg_class_subset_p (this_alternative[i],
3540 preferred_class[i]))
3542 /* Since we don't have a way of forming the intersection,
3543 we just do something special if the preferred class
3544 is a subset of the class we have; that's the most
3545 common case anyway. */
3546 if (reg_class_subset_p (preferred_class[i],
3547 this_alternative[i]))
3548 this_alternative[i] = (int) preferred_class[i];
3549 else
3550 reject += (2 + 2 * pref_or_nothing[i]);
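/* Illustrative example (the target and classes are only for
   illustration): if this alternative would accept GENERAL_REGS but the
   pseudo's preferred class is the smaller INDEX_REGS, we narrow
   this_alternative[i] to INDEX_REGS so a later insn is more likely to
   reuse the spill register; if the two classes do not nest, we merely
   add to REJECT.  */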
3555 /* Now see if any output operands that are marked "earlyclobber"
3556 in this alternative conflict with any input operands
3557 or any memory addresses. */
3559 for (i = 0; i < noperands; i++)
3560 if (this_alternative_earlyclobber[i]
3561 && (this_alternative_win[i] || this_alternative_match_win[i]))
3563 struct decomposition early_data;
3565 early_data = decompose (recog_data.operand[i]);
3567 gcc_assert (modified[i] != RELOAD_READ);
3569 if (this_alternative[i] == NO_REGS)
3571 this_alternative_earlyclobber[i] = 0;
3572 gcc_assert (this_insn_is_asm);
3573 error_for_asm (this_insn,
3574 "%<&%> constraint used with no register class");
3577 for (j = 0; j < noperands; j++)
3578 /* Is this an input operand or a memory ref? */
3579 if ((MEM_P (recog_data.operand[j])
3580 || modified[j] != RELOAD_WRITE)
3581 && j != i
3582 /* Ignore things like match_operator operands. */
3583 && *recog_data.constraints[j] != 0
3584 /* Don't count an input operand that is constrained to match
3585 the early clobber operand. */
3586 && ! (this_alternative_matches[j] == i
3587 && rtx_equal_p (recog_data.operand[i],
3588 recog_data.operand[j]))
3589 /* Is it altered by storing the earlyclobber operand? */
3590 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3591 early_data))
3593 /* If the output is in a non-empty few-regs class,
3594 it's costly to reload it, so reload the input instead. */
3595 if (SMALL_REGISTER_CLASS_P (this_alternative[i])
3596 && (REG_P (recog_data.operand[j])
3597 || GET_CODE (recog_data.operand[j]) == SUBREG))
3599 losers++;
3600 this_alternative_win[j] = 0;
3601 this_alternative_match_win[j] = 0;
3603 else
3604 break;
3606 /* If an earlyclobber operand conflicts with something,
3607 it must be reloaded, so request this and count the cost. */
3608 if (j != noperands)
3610 losers++;
3611 this_alternative_win[i] = 0;
3612 this_alternative_match_win[j] = 0;
3613 for (j = 0; j < noperands; j++)
3614 if (this_alternative_matches[j] == i
3615 && this_alternative_match_win[j])
3617 this_alternative_win[j] = 0;
3618 this_alternative_match_win[j] = 0;
3619 losers++;
3624 /* If one alternative accepts all the operands, no reload required,
3625 choose that alternative; don't consider the remaining ones. */
3626 if (losers == 0)
3628 /* Unswap these so that they are never swapped at `finish'. */
3629 if (commutative >= 0)
3631 recog_data.operand[commutative] = substed_operand[commutative];
3632 recog_data.operand[commutative + 1]
3633 = substed_operand[commutative + 1];
3635 for (i = 0; i < noperands; i++)
3637 goal_alternative_win[i] = this_alternative_win[i];
3638 goal_alternative_match_win[i] = this_alternative_match_win[i];
3639 goal_alternative[i] = this_alternative[i];
3640 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3641 goal_alternative_matches[i] = this_alternative_matches[i];
3642 goal_alternative_earlyclobber[i]
3643 = this_alternative_earlyclobber[i];
3645 goal_alternative_number = this_alternative_number;
3646 goal_alternative_swapped = swapped;
3647 goal_earlyclobber = this_earlyclobber;
3648 goto finish;
3651 /* REJECT, set by the ! and ? constraint characters and when a register
3652 would be reloaded into a non-preferred class, discourages the use of
3653 this alternative for a reload goal. REJECT is incremented by six
3654 for each ? and two for each non-preferred class. */
3655 losers = losers * 6 + reject;
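/* A worked example of the weighting: an alternative that needs two
   reloads and whose constraint string contained one '?' (reject == 6)
   scores 2 * 6 + 6 == 18, so a '?' costs as much as one extra reload.  */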
3657 /* If this alternative can be made to work by reloading,
3658 and it needs less reloading than the others checked so far,
3659 record it as the chosen goal for reloading. */
3660 if (! bad && best > losers)
3662 for (i = 0; i < noperands; i++)
3664 goal_alternative[i] = this_alternative[i];
3665 goal_alternative_win[i] = this_alternative_win[i];
3666 goal_alternative_match_win[i] = this_alternative_match_win[i];
3667 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3668 goal_alternative_matches[i] = this_alternative_matches[i];
3669 goal_alternative_earlyclobber[i]
3670 = this_alternative_earlyclobber[i];
3672 goal_alternative_swapped = swapped;
3673 best = losers;
3674 goal_alternative_number = this_alternative_number;
3675 goal_earlyclobber = this_earlyclobber;
3679 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3680 then we need to try each alternative twice,
3681 the second time matching those two operands
3682 as if we had exchanged them.
3683 To do this, really exchange them in operands.
3685 If we have just tried the alternatives the second time,
3686 return operands to normal and drop through. */
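/* (COMMUTATIVE was set earlier in this function, when the '%'
   constraint modifier was seen, to the index of the first operand of
   the commutative pair; it is -1 when the insn has no such pair.)  */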
3688 if (commutative >= 0)
3690 swapped = !swapped;
3691 if (swapped)
3693 enum reg_class tclass;
3694 int t;
3696 recog_data.operand[commutative] = substed_operand[commutative + 1];
3697 recog_data.operand[commutative + 1] = substed_operand[commutative];
3698 /* Swap the duplicates too. */
3699 for (i = 0; i < recog_data.n_dups; i++)
3700 if (recog_data.dup_num[i] == commutative
3701 || recog_data.dup_num[i] == commutative + 1)
3702 *recog_data.dup_loc[i]
3703 = recog_data.operand[(int) recog_data.dup_num[i]];
3705 tclass = preferred_class[commutative];
3706 preferred_class[commutative] = preferred_class[commutative + 1];
3707 preferred_class[commutative + 1] = tclass;
3709 t = pref_or_nothing[commutative];
3710 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3711 pref_or_nothing[commutative + 1] = t;
3713 t = address_reloaded[commutative];
3714 address_reloaded[commutative] = address_reloaded[commutative + 1];
3715 address_reloaded[commutative + 1] = t;
3717 memcpy (constraints, recog_data.constraints,
3718 noperands * sizeof (char *));
3719 goto try_swapped;
3721 else
3723 recog_data.operand[commutative] = substed_operand[commutative];
3724 recog_data.operand[commutative + 1]
3725 = substed_operand[commutative + 1];
3726 /* Unswap the duplicates too. */
3727 for (i = 0; i < recog_data.n_dups; i++)
3728 if (recog_data.dup_num[i] == commutative
3729 || recog_data.dup_num[i] == commutative + 1)
3730 *recog_data.dup_loc[i]
3731 = recog_data.operand[(int) recog_data.dup_num[i]];
3735 /* The operands don't meet the constraints.
3736 goal_alternative describes the alternative
3737 that we could reach by reloading the fewest operands.
3738 Reload so as to fit it. */
3740 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3742 /* No alternative works with reloads?? */
3743 if (insn_code_number >= 0)
3744 fatal_insn ("unable to generate reloads for:", insn);
3745 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3746 /* Avoid further trouble with this insn. */
3747 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3748 n_reloads = 0;
3749 return 0;
3752 /* Jump to `finish' from above if all operands are valid already.
3753 In that case, goal_alternative_win is all 1. */
3754 finish:
3756 /* Right now, for any pair of operands I and J that are required to match,
3757 with I < J,
3758 goal_alternative_matches[J] is I.
3759 Set up goal_alternative_matched as the inverse function:
3760 goal_alternative_matched[I] = J. */
3762 for (i = 0; i < noperands; i++)
3763 goal_alternative_matched[i] = -1;
3765 for (i = 0; i < noperands; i++)
3766 if (! goal_alternative_win[i]
3767 && goal_alternative_matches[i] >= 0)
3768 goal_alternative_matched[goal_alternative_matches[i]] = i;
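/* For example, if operand 2's constraint is "0" (it must match operand
   0) and operand 2 itself needs a reload, then
   goal_alternative_matches[2] == 0 and we have just set
   goal_alternative_matched[0] to 2.  */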
3770 for (i = 0; i < noperands; i++)
3771 goal_alternative_win[i] |= goal_alternative_match_win[i];
3773 /* If the best alternative is with operands 1 and 2 swapped,
3774 consider them swapped before reporting the reloads. Update the
3775 operand numbers of any reloads already pushed. */
3777 if (goal_alternative_swapped)
3779 rtx tem;
3781 tem = substed_operand[commutative];
3782 substed_operand[commutative] = substed_operand[commutative + 1];
3783 substed_operand[commutative + 1] = tem;
3784 tem = recog_data.operand[commutative];
3785 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3786 recog_data.operand[commutative + 1] = tem;
3787 tem = *recog_data.operand_loc[commutative];
3788 *recog_data.operand_loc[commutative]
3789 = *recog_data.operand_loc[commutative + 1];
3790 *recog_data.operand_loc[commutative + 1] = tem;
3792 for (i = 0; i < n_reloads; i++)
3794 if (rld[i].opnum == commutative)
3795 rld[i].opnum = commutative + 1;
3796 else if (rld[i].opnum == commutative + 1)
3797 rld[i].opnum = commutative;
3801 for (i = 0; i < noperands; i++)
3803 operand_reloadnum[i] = -1;
3805 /* If this is an earlyclobber operand, we need to widen the scope.
3806 The reload must remain valid from the start of the insn being
3807 reloaded until after the operand is stored into its destination.
3808 We approximate this with RELOAD_OTHER even though we know that we
3809 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3811 One special case that is worth checking is when we have an
3812 output that is earlyclobber but isn't used past the insn (typically
3813 a SCRATCH). In this case, we need only keep the reload live
3814 through the insn itself, but not for any of our input or output
3815 reloads.
3816 But we must not accidentally narrow the scope of an existing
3817 RELOAD_OTHER reload - leave these alone.
3819 In any case, anything needed to address this operand can remain
3820 categorized as it was previously. */
3822 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3823 operand_type[i]
3824 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3825 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3828 /* Any constants that aren't allowed and can't be reloaded
3829 into registers are here changed into memory references. */
3830 for (i = 0; i < noperands; i++)
3831 if (! goal_alternative_win[i]
3832 && CONST_POOL_OK_P (recog_data.operand[i])
3833 && ((PREFERRED_RELOAD_CLASS (recog_data.operand[i],
3834 (enum reg_class) goal_alternative[i])
3835 == NO_REGS)
3836 || no_input_reloads)
3837 && operand_mode[i] != VOIDmode)
3839 int this_address_reloaded;
3841 this_address_reloaded = 0;
3842 substed_operand[i] = recog_data.operand[i]
3843 = find_reloads_toplev (force_const_mem (operand_mode[i],
3844 recog_data.operand[i]),
3845 i, address_type[i], ind_levels, 0, insn,
3846 &this_address_reloaded);
3847 if (alternative_allows_const_pool_ref (this_address_reloaded == 0
3848 ? substed_operand[i]
3849 : NULL,
3850 recog_data.constraints[i],
3851 goal_alternative_number))
3852 goal_alternative_win[i] = 1;
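/* For example, a CONST_DOUBLE operand that PREFERRED_RELOAD_CLASS
   refuses to place in the chosen alternative's class is put into the
   constant pool by force_const_mem; the operand becomes a MEM
   referencing the pool entry, and find_reloads_toplev then reloads
   that address as needed.  */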
3855 /* Likewise any invalid constants appearing as operand of a PLUS
3856 that is to be reloaded. */
3857 for (i = 0; i < noperands; i++)
3858 if (! goal_alternative_win[i]
3859 && GET_CODE (recog_data.operand[i]) == PLUS
3860 && CONST_POOL_OK_P (XEXP (recog_data.operand[i], 1))
3861 && (PREFERRED_RELOAD_CLASS (XEXP (recog_data.operand[i], 1),
3862 (enum reg_class) goal_alternative[i])
3863 == NO_REGS)
3864 && operand_mode[i] != VOIDmode)
3866 rtx tem = force_const_mem (operand_mode[i],
3867 XEXP (recog_data.operand[i], 1));
3868 tem = gen_rtx_PLUS (operand_mode[i],
3869 XEXP (recog_data.operand[i], 0), tem);
3871 substed_operand[i] = recog_data.operand[i]
3872 = find_reloads_toplev (tem, i, address_type[i],
3873 ind_levels, 0, insn, NULL);
3876 /* Record the values of the earlyclobber operands for the caller. */
3877 if (goal_earlyclobber)
3878 for (i = 0; i < noperands; i++)
3879 if (goal_alternative_earlyclobber[i])
3880 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3882 /* Now record reloads for all the operands that need them. */
3883 for (i = 0; i < noperands; i++)
3884 if (! goal_alternative_win[i])
3886 /* Operands that match previous ones have already been handled. */
3887 if (goal_alternative_matches[i] >= 0)
3889 /* Handle an operand with a nonoffsettable address
3890 appearing where an offsettable address will do
3891 by reloading the address into a base register.
3893 ??? We can also do this when the operand is a register and
3894 reg_equiv_mem is not offsettable, but this is a bit tricky,
3895 so we don't bother with it. It may not be worth doing. */
3896 else if (goal_alternative_matched[i] == -1
3897 && goal_alternative_offmemok[i]
3898 && MEM_P (recog_data.operand[i]))
3900 /* If the address to be reloaded is a VOIDmode constant,
3901 use Pmode as mode of the reload register, as would have
3902 been done by find_reloads_address. */
3903 enum machine_mode address_mode;
3904 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3905 if (address_mode == VOIDmode)
3906 address_mode = Pmode;
3908 operand_reloadnum[i]
3909 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3910 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3911 base_reg_class (VOIDmode, MEM, SCRATCH),
3912 address_mode,
3913 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3914 rld[operand_reloadnum[i]].inc
3915 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3917 /* If this operand is an output, we will have made any
3918 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3919 now we are treating part of the operand as an input, so
3920 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3922 if (modified[i] == RELOAD_WRITE)
3924 for (j = 0; j < n_reloads; j++)
3926 if (rld[j].opnum == i)
3928 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
3929 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
3930 else if (rld[j].when_needed
3931 == RELOAD_FOR_OUTADDR_ADDRESS)
3932 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
3937 else if (goal_alternative_matched[i] == -1)
3939 operand_reloadnum[i]
3940 = push_reload ((modified[i] != RELOAD_WRITE
3941 ? recog_data.operand[i] : 0),
3942 (modified[i] != RELOAD_READ
3943 ? recog_data.operand[i] : 0),
3944 (modified[i] != RELOAD_WRITE
3945 ? recog_data.operand_loc[i] : 0),
3946 (modified[i] != RELOAD_READ
3947 ? recog_data.operand_loc[i] : 0),
3948 (enum reg_class) goal_alternative[i],
3949 (modified[i] == RELOAD_WRITE
3950 ? VOIDmode : operand_mode[i]),
3951 (modified[i] == RELOAD_READ
3952 ? VOIDmode : operand_mode[i]),
3953 (insn_code_number < 0 ? 0
3954 : insn_data[insn_code_number].operand[i].strict_low),
3955 0, i, operand_type[i]);
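/* In the call above, modified[i] selects which sides of the reload are
   used: an operand that is only read gets an input-only reload (OUT
   and OUTLOC are 0), one that is only written gets an output-only
   reload (IN and INLOC are 0), and an operand that is both read and
   written is passed as both IN and OUT so a single reload register
   carries the value in and back out.  */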
3957 /* In a matching pair of operands, one must be input only
3958 and the other must be output only.
3959 Pass the input operand as IN and the other as OUT. */
3960 else if (modified[i] == RELOAD_READ
3961 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
3963 operand_reloadnum[i]
3964 = push_reload (recog_data.operand[i],
3965 recog_data.operand[goal_alternative_matched[i]],
3966 recog_data.operand_loc[i],
3967 recog_data.operand_loc[goal_alternative_matched[i]],
3968 (enum reg_class) goal_alternative[i],
3969 operand_mode[i],
3970 operand_mode[goal_alternative_matched[i]],
3971 0, 0, i, RELOAD_OTHER);
3972 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
3974 else if (modified[i] == RELOAD_WRITE
3975 && modified[goal_alternative_matched[i]] == RELOAD_READ)
3977 operand_reloadnum[goal_alternative_matched[i]]
3978 = push_reload (recog_data.operand[goal_alternative_matched[i]],
3979 recog_data.operand[i],
3980 recog_data.operand_loc[goal_alternative_matched[i]],
3981 recog_data.operand_loc[i],
3982 (enum reg_class) goal_alternative[i],
3983 operand_mode[goal_alternative_matched[i]],
3984 operand_mode[i],
3985 0, 0, i, RELOAD_OTHER);
3986 operand_reloadnum[i] = output_reloadnum;
3988 else
3990 gcc_assert (insn_code_number < 0);
3991 error_for_asm (insn, "inconsistent operand constraints "
3992 "in an %<asm%>");
3993 /* Avoid further trouble with this insn. */
3994 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3995 n_reloads = 0;
3996 return 0;
3999 else if (goal_alternative_matched[i] < 0
4000 && goal_alternative_matches[i] < 0
4001 && address_operand_reloaded[i] != 1
4002 && optimize)
4004 /* For each non-matching operand that's a MEM or a pseudo-register
4005 that didn't get a hard register, make an optional reload.
4006 This may get done even if the insn needs no reloads otherwise. */
4008 rtx operand = recog_data.operand[i];
4010 while (GET_CODE (operand) == SUBREG)
4011 operand = SUBREG_REG (operand);
4012 if ((MEM_P (operand)
4013 || (REG_P (operand)
4014 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4015 /* If this is only for an output, the optional reload would not
4016 actually cause us to use a register now, just note that
4017 something is stored here. */
4018 && ((enum reg_class) goal_alternative[i] != NO_REGS
4019 || modified[i] == RELOAD_WRITE)
4020 && ! no_input_reloads
4021 /* An optional output reload might allow us to delete INSN later.
4022 We mustn't make in-out reloads on insns for which output reloads
4023 are not permitted.
4024 If this is an asm, we can't delete it; we must not even call
4025 push_reload for an optional output reload in this case,
4026 because we can't be sure that the constraint allows a register,
4027 and push_reload verifies the constraints for asms. */
4028 && (modified[i] == RELOAD_READ
4029 || (! no_output_reloads && ! this_insn_is_asm)))
4030 operand_reloadnum[i]
4031 = push_reload ((modified[i] != RELOAD_WRITE
4032 ? recog_data.operand[i] : 0),
4033 (modified[i] != RELOAD_READ
4034 ? recog_data.operand[i] : 0),
4035 (modified[i] != RELOAD_WRITE
4036 ? recog_data.operand_loc[i] : 0),
4037 (modified[i] != RELOAD_READ
4038 ? recog_data.operand_loc[i] : 0),
4039 (enum reg_class) goal_alternative[i],
4040 (modified[i] == RELOAD_WRITE
4041 ? VOIDmode : operand_mode[i]),
4042 (modified[i] == RELOAD_READ
4043 ? VOIDmode : operand_mode[i]),
4044 (insn_code_number < 0 ? 0
4045 : insn_data[insn_code_number].operand[i].strict_low),
4046 1, i, operand_type[i]);
4047 /* If a memory reference remains (either as a MEM or a pseudo that
4048 did not get a hard register), yet we can't make an optional
4049 reload, check if this is actually a pseudo register reference;
4050 we then need to emit a USE and/or a CLOBBER so that reload
4051 inheritance will do the right thing. */
4052 else if (replace
4053 && (MEM_P (operand)
4054 || (REG_P (operand)
4055 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4056 && reg_renumber [REGNO (operand)] < 0)))
4058 operand = *recog_data.operand_loc[i];
4060 while (GET_CODE (operand) == SUBREG)
4061 operand = SUBREG_REG (operand);
4062 if (REG_P (operand))
4064 if (modified[i] != RELOAD_WRITE)
4065 /* We mark the USE with QImode so that we recognize
4066 it as one that can be safely deleted at the end
4067 of reload. */
4068 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4069 insn), QImode);
4070 if (modified[i] != RELOAD_READ)
4071 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, operand), insn);
4075 else if (goal_alternative_matches[i] >= 0
4076 && goal_alternative_win[goal_alternative_matches[i]]
4077 && modified[i] == RELOAD_READ
4078 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4079 && ! no_input_reloads && ! no_output_reloads
4080 && optimize)
4082 /* Similarly, make an optional reload for a pair of matching
4083 objects that are in MEM or a pseudo that didn't get a hard reg. */
4085 rtx operand = recog_data.operand[i];
4087 while (GET_CODE (operand) == SUBREG)
4088 operand = SUBREG_REG (operand);
4089 if ((MEM_P (operand)
4090 || (REG_P (operand)
4091 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4092 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
4093 != NO_REGS))
4094 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4095 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4096 recog_data.operand[i],
4097 recog_data.operand_loc[goal_alternative_matches[i]],
4098 recog_data.operand_loc[i],
4099 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4100 operand_mode[goal_alternative_matches[i]],
4101 operand_mode[i],
4102 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4105 /* Perform whatever substitutions on the operands we are supposed
4106 to make due to commutativity or replacement of registers
4107 with equivalent constants or memory slots. */
4109 for (i = 0; i < noperands; i++)
4111 /* We only do this on the last pass through reload, because it is
4112 possible for some data (like reg_equiv_address) to be changed during
4113 later passes. Moreover, we lose the opportunity to get a useful
4114 reload_{in,out}_reg when we do these replacements. */
4116 if (replace)
4118 rtx substitution = substed_operand[i];
4120 *recog_data.operand_loc[i] = substitution;
4122 /* If we're replacing an operand with a LABEL_REF, we need to
4123 make sure that there's a REG_LABEL_OPERAND note attached to
4124 this instruction. */
4125 if (GET_CODE (substitution) == LABEL_REF
4126 && !find_reg_note (insn, REG_LABEL_OPERAND,
4127 XEXP (substitution, 0))
4128 /* For a JUMP_P, if it was a branch target it must have
4129 already been recorded as such. */
4130 && (!JUMP_P (insn)
4131 || !label_is_jump_target_p (XEXP (substitution, 0),
4132 insn)))
4133 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL_OPERAND,
4134 XEXP (substitution, 0),
4135 REG_NOTES (insn));
4137 else
4138 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4141 /* If this insn pattern contains any MATCH_DUP's, make sure that
4142 they will be substituted if the operands they match are substituted.
4143 Also do now any substitutions we already did on the operands.
4145 Don't do this if we aren't making replacements because we might be
4146 propagating things allocated by frame pointer elimination into places
4147 it doesn't expect. */
4149 if (insn_code_number >= 0 && replace)
4150 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4152 int opno = recog_data.dup_num[i];
4153 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4154 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4157 #if 0
4158 /* This loses because reloading of prior insns can invalidate the equivalence
4159 (or at least find_equiv_reg isn't smart enough to find it any more),
4160 causing this insn to need more reload regs than it needed before.
4161 It may be too late to make the reload regs available.
4162 Now this optimization is done safely in choose_reload_regs. */
4164 /* For each reload of a reg into some other class of reg,
4165 search for an existing equivalent reg (same value now) in the right class.
4166 We can use it as long as we don't need to change its contents. */
4167 for (i = 0; i < n_reloads; i++)
4168 if (rld[i].reg_rtx == 0
4169 && rld[i].in != 0
4170 && REG_P (rld[i].in)
4171 && rld[i].out == 0)
4173 rld[i].reg_rtx
4174 = find_equiv_reg (rld[i].in, insn, rld[i].class, -1,
4175 static_reload_reg_p, 0, rld[i].inmode);
4176 /* Prevent generation of insn to load the value
4177 because the one we found already has the value. */
4178 if (rld[i].reg_rtx)
4179 rld[i].in = rld[i].reg_rtx;
4181 #endif
4183 /* If we detected error and replaced asm instruction by USE, forget about the
4184 reloads. */
4185 if (GET_CODE (PATTERN (insn)) == USE
4186 && GET_CODE (XEXP (PATTERN (insn), 0)) == CONST_INT)
4187 n_reloads = 0;
4189 /* Perhaps an output reload can be combined with another
4190 to reduce needs by one. */
4191 if (!goal_earlyclobber)
4192 combine_reloads ();
4194 /* If we have a pair of reloads for parts of an address, they are reloading
4195 the same object, the operands themselves were not reloaded, and they
4196 are for two operands that are supposed to match, merge the reloads and
4197 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4199 for (i = 0; i < n_reloads; i++)
4201 int k;
4203 for (j = i + 1; j < n_reloads; j++)
4204 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4205 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4206 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4207 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4208 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4209 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4210 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4211 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4212 && rtx_equal_p (rld[i].in, rld[j].in)
4213 && (operand_reloadnum[rld[i].opnum] < 0
4214 || rld[operand_reloadnum[rld[i].opnum]].optional)
4215 && (operand_reloadnum[rld[j].opnum] < 0
4216 || rld[operand_reloadnum[rld[j].opnum]].optional)
4217 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4218 || (goal_alternative_matches[rld[j].opnum]
4219 == rld[i].opnum)))
4221 for (k = 0; k < n_replacements; k++)
4222 if (replacements[k].what == j)
4223 replacements[k].what = i;
4225 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4226 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4227 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4228 else
4229 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4230 rld[j].in = 0;
4234 /* Scan all the reloads and update their type.
4235 If a reload is for the address of an operand and we didn't reload
4236 that operand, change the type. Similarly, change the operand number
4237 of a reload when two operands match. If a reload is optional, treat it
4238 as though the operand isn't reloaded.
4240 ??? This latter case is somewhat odd because if we do the optional
4241 reload, it means the object is hanging around. Thus we need only
4242 do the address reload if the optional reload was NOT done.
4244 Change secondary reloads to be the address type of their operand, not
4245 the normal type.
4247 If an operand's reload is now RELOAD_OTHER, change any
4248 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4249 RELOAD_FOR_OTHER_ADDRESS. */
4251 for (i = 0; i < n_reloads; i++)
4253 if (rld[i].secondary_p
4254 && rld[i].when_needed == operand_type[rld[i].opnum])
4255 rld[i].when_needed = address_type[rld[i].opnum];
4257 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4258 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4259 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4260 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4261 && (operand_reloadnum[rld[i].opnum] < 0
4262 || rld[operand_reloadnum[rld[i].opnum]].optional))
4264 /* If we have a secondary reload to go along with this reload,
4265 change its type to RELOAD_FOR_OPADDR_ADDR. */
4267 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4268 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4269 && rld[i].secondary_in_reload != -1)
4271 int secondary_in_reload = rld[i].secondary_in_reload;
4273 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4275 /* If there's a tertiary reload we have to change it also. */
4276 if (secondary_in_reload > 0
4277 && rld[secondary_in_reload].secondary_in_reload != -1)
4278 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4279 = RELOAD_FOR_OPADDR_ADDR;
4282 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4283 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4284 && rld[i].secondary_out_reload != -1)
4286 int secondary_out_reload = rld[i].secondary_out_reload;
4288 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4290 /* If there's a tertiary reload we have to change it also. */
4291 if (secondary_out_reload
4292 && rld[secondary_out_reload].secondary_out_reload != -1)
4293 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4294 = RELOAD_FOR_OPADDR_ADDR;
4297 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4300 else
4301 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4304 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4305 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4306 && operand_reloadnum[rld[i].opnum] >= 0
4307 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4308 == RELOAD_OTHER))
4309 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4311 if (goal_alternative_matches[rld[i].opnum] >= 0)
4312 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4315 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4316 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4317 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4319 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4320 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4321 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4322 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4323 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4324 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4325 This is complicated by the fact that a single operand can have more
4326 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4327 choose_reload_regs without affecting code quality, and cases that
4328 actually fail are extremely rare, so it turns out to be better to fix
4329 the problem here by not generating cases that choose_reload_regs will
4330 fail for. */
4331 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4332 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4333 a single operand.
4334 We can reduce the register pressure by exploiting that a
4335 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4336 does not conflict with any of them, if it is only used for the first of
4337 the RELOAD_FOR_X_ADDRESS reloads. */
4339 int first_op_addr_num = -2;
4340 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4341 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4342 int need_change = 0;
4343 /* We use first_op_addr_num and the contents of the above arrays
4344 first as flags - -2 means no instance encountered, -1 means exactly
4345 one instance encountered.
4346 If more than one instance has been encountered, we store the reload
4347 number of the first reload of the kind in question; reload numbers
4348 are known to be non-negative. */
4349 for (i = 0; i < noperands; i++)
4350 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
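/* The counting idiom below: the first instance of a kind takes its
   counter from -2 to -1; a second instance takes it to 0, which is
   >= 0, so the counter is overwritten with that reload's number and
   NEED_CHANGE is set.  Because the loop scans reloads from last to
   first, the counter ends up holding the number of the first reload of
   that kind.  */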
4351 for (i = n_reloads - 1; i >= 0; i--)
4353 switch (rld[i].when_needed)
4355 case RELOAD_FOR_OPERAND_ADDRESS:
4356 if (++first_op_addr_num >= 0)
4358 first_op_addr_num = i;
4359 need_change = 1;
4361 break;
4362 case RELOAD_FOR_INPUT_ADDRESS:
4363 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4365 first_inpaddr_num[rld[i].opnum] = i;
4366 need_change = 1;
4368 break;
4369 case RELOAD_FOR_OUTPUT_ADDRESS:
4370 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4372 first_outpaddr_num[rld[i].opnum] = i;
4373 need_change = 1;
4375 break;
4376 default:
4377 break;
4381 if (need_change)
4383 for (i = 0; i < n_reloads; i++)
4385 int first_num;
4386 enum reload_type type;
4388 switch (rld[i].when_needed)
4390 case RELOAD_FOR_OPADDR_ADDR:
4391 first_num = first_op_addr_num;
4392 type = RELOAD_FOR_OPERAND_ADDRESS;
4393 break;
4394 case RELOAD_FOR_INPADDR_ADDRESS:
4395 first_num = first_inpaddr_num[rld[i].opnum];
4396 type = RELOAD_FOR_INPUT_ADDRESS;
4397 break;
4398 case RELOAD_FOR_OUTADDR_ADDRESS:
4399 first_num = first_outpaddr_num[rld[i].opnum];
4400 type = RELOAD_FOR_OUTPUT_ADDRESS;
4401 break;
4402 default:
4403 continue;
4405 if (first_num < 0)
4406 continue;
4407 else if (i > first_num)
4408 rld[i].when_needed = type;
4409 else
4411 /* Check if the only TYPE reload that uses reload I is
4412 reload FIRST_NUM. */
4413 for (j = n_reloads - 1; j > first_num; j--)
4415 if (rld[j].when_needed == type
4416 && (rld[i].secondary_p
4417 ? rld[j].secondary_in_reload == i
4418 : reg_mentioned_p (rld[i].in, rld[j].in)))
4420 rld[i].when_needed = type;
4421 break;
4429 /* See if we have any reloads that are now allowed to be merged
4430 because we've changed when the reload is needed to
4431 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4432 check for the most common cases. */
4434 for (i = 0; i < n_reloads; i++)
4435 if (rld[i].in != 0 && rld[i].out == 0
4436 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4437 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4438 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4439 for (j = 0; j < n_reloads; j++)
4440 if (i != j && rld[j].in != 0 && rld[j].out == 0
4441 && rld[j].when_needed == rld[i].when_needed
4442 && MATCHES (rld[i].in, rld[j].in)
4443 && rld[i].class == rld[j].class
4444 && !rld[i].nocombine && !rld[j].nocombine
4445 && rld[i].reg_rtx == rld[j].reg_rtx)
4447 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4448 transfer_replacements (i, j);
4449 rld[j].in = 0;
4452 #ifdef HAVE_cc0
4453 /* If we made any reloads for addresses, see if they violate a
4454 "no input reloads" requirement for this insn. But loads that we
4455 do after the insn (such as for output addresses) are fine. */
4456 if (no_input_reloads)
4457 for (i = 0; i < n_reloads; i++)
4458 gcc_assert (rld[i].in == 0
4459 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4460 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4461 #endif
4463 /* Compute reload_mode and reload_nregs. */
4464 for (i = 0; i < n_reloads; i++)
4466 rld[i].mode
4467 = (rld[i].inmode == VOIDmode
4468 || (GET_MODE_SIZE (rld[i].outmode)
4469 > GET_MODE_SIZE (rld[i].inmode)))
4470 ? rld[i].outmode : rld[i].inmode;
4472 rld[i].nregs = CLASS_MAX_NREGS (rld[i].class, rld[i].mode);
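/* For example, a reload with inmode SImode and outmode DImode gets
   rld[i].mode == DImode, so the reload register chosen later is wide
   enough for both uses.  */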
4475 /* Special case a simple move with an input reload and a
4476 destination of a hard reg, if the hard reg is ok, use it. */
4477 for (i = 0; i < n_reloads; i++)
4478 if (rld[i].when_needed == RELOAD_FOR_INPUT
4479 && GET_CODE (PATTERN (insn)) == SET
4480 && REG_P (SET_DEST (PATTERN (insn)))
4481 && (SET_SRC (PATTERN (insn)) == rld[i].in
4482 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4483 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4485 rtx dest = SET_DEST (PATTERN (insn));
4486 unsigned int regno = REGNO (dest);
4488 if (regno < FIRST_PSEUDO_REGISTER
4489 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].class], regno)
4490 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4492 int nr = hard_regno_nregs[regno][rld[i].mode];
4493 int ok = 1, nri;
4495 for (nri = 1; nri < nr; nri ++)
4496 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].class], regno + nri))
4497 ok = 0;
4499 if (ok)
4500 rld[i].reg_rtx = dest;
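/* That is, for a simple move such as (set (reg:SI 0) (reg:SI pseudo))
   where the source needs an input reload, the hard destination
   register itself is used as the reload register, provided it (and
   every additional hard register it spans in this mode) belongs to the
   reload's class.  The register number is only illustrative.  */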
4504 return retval;
4507 /* Return true if alternative number ALTNUM in constraint-string
4508 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4509 MEM gives the reference if it didn't need any reloads, otherwise it
4510 is null. */
4512 static bool
4513 alternative_allows_const_pool_ref (rtx mem, const char *constraint, int altnum)
4515 int c;
4517 /* Skip alternatives before the one requested. */
4518 while (altnum > 0)
4520 while (*constraint++ != ',');
4521 altnum--;
4523 /* Scan the requested alternative for 'm' or 'o'.
4524 If one of them is present, this alternative accepts the result of
4525 passing a constant-pool reference through find_reloads_toplev.
4527 The same is true of extra memory constraints if the address
4528 was reloaded into a register. However, the target may elect
4529 to disallow the original constant address, forcing it to be
4530 reloaded into a register instead. */
4531 for (; (c = *constraint) && c != ',' && c != '#';
4532 constraint += CONSTRAINT_LEN (c, constraint))
4534 if (c == 'm' || c == 'o')
4535 return true;
4536 #ifdef EXTRA_CONSTRAINT_STR
4537 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4538 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4539 return true;
4540 #endif
4542 return false;
4545 /* Scan X for memory references and scan the addresses for reloading.
4546 Also checks for references to "constant" regs that we want to eliminate
4547 and replaces them with the values they stand for.
4548 We may alter X destructively if it contains a reference to such.
4549 If X is just a constant reg, we return the equivalent value
4550 instead of X.
4552 IND_LEVELS says how many levels of indirect addressing this machine
4553 supports.
4555 OPNUM and TYPE identify the purpose of the reload.
4557 IS_SET_DEST is true if X is the destination of a SET, which is not
4558 appropriate to be replaced by a constant.
4560 INSN, if nonzero, is the insn in which we do the reload. It is used
4561 to determine if we may generate output reloads, and where to put USEs
4562 for pseudos that we have to replace with stack slots.
4564 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4565 result of find_reloads_address. */
4567 static rtx
4568 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4569 int ind_levels, int is_set_dest, rtx insn,
4570 int *address_reloaded)
4572 RTX_CODE code = GET_CODE (x);
4574 const char *fmt = GET_RTX_FORMAT (code);
4575 int i;
4576 int copied;
4578 if (code == REG)
4580 /* This code is duplicated for speed in find_reloads. */
4581 int regno = REGNO (x);
4582 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4583 x = reg_equiv_constant[regno];
4584 #if 0
4585 /* This creates (subreg (mem...)) which would cause an unnecessary
4586 reload of the mem. */
4587 else if (reg_equiv_mem[regno] != 0)
4588 x = reg_equiv_mem[regno];
4589 #endif
4590 else if (reg_equiv_memory_loc[regno]
4591 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4593 rtx mem = make_memloc (x, regno);
4594 if (reg_equiv_address[regno]
4595 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4597 /* If this is not a toplevel operand, find_reloads doesn't see
4598 this substitution. We have to emit a USE of the pseudo so
4599 that delete_output_reload can see it. */
4600 if (replace_reloads && recog_data.operand[opnum] != x)
4601 /* We mark the USE with QImode so that we recognize it
4602 as one that can be safely deleted at the end of
4603 reload. */
4604 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4605 QImode);
4606 x = mem;
4607 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4608 opnum, type, ind_levels, insn);
4609 if (!rtx_equal_p (x, mem))
4610 push_reg_equiv_alt_mem (regno, x);
4611 if (address_reloaded)
4612 *address_reloaded = i;
4615 return x;
4617 if (code == MEM)
4619 rtx tem = x;
4621 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4622 opnum, type, ind_levels, insn);
4623 if (address_reloaded)
4624 *address_reloaded = i;
4626 return tem;
4629 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4631 /* Check for SUBREG containing a REG that's equivalent to a
4632 constant. If the constant has a known value, truncate it
4633 right now. Similarly if we are extracting a single-word of a
4634 multi-word constant. If the constant is symbolic, allow it
4635 to be substituted normally. push_reload will strip the
4636 subreg later. The constant must not be VOIDmode, because we
4637 will lose the mode of the register (this should never happen
4638 because one of the cases above should handle it). */
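/* For example, (subreg:SI (reg:DI P) 4) where pseudo P is equivalent
   to a CONST_DOUBLE simplifies to the selected word of that constant;
   if the resulting constant is not legitimate, it is placed in the
   constant pool below and its address reloaded.  */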
4640 int regno = REGNO (SUBREG_REG (x));
4641 rtx tem;
4643 if (regno >= FIRST_PSEUDO_REGISTER
4644 && reg_renumber[regno] < 0
4645 && reg_equiv_constant[regno] != 0)
4647 tem =
4648 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4649 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4650 gcc_assert (tem);
4651 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4653 tem = force_const_mem (GET_MODE (x), tem);
4654 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4655 &XEXP (tem, 0), opnum, type,
4656 ind_levels, insn);
4657 if (address_reloaded)
4658 *address_reloaded = i;
4660 return tem;
4663 /* If the subreg contains a reg that will be converted to a mem,
4664 convert the subreg to a narrower memref now.
4665 Otherwise, we would get (subreg (mem ...) ...),
4666 which would force reload of the mem.
4668 We also need to do this if there is an equivalent MEM that is
4669 not offsettable. In that case, alter_subreg would produce an
4670 invalid address on big-endian machines.
4672 For machines that extend byte loads, we must not reload using
4673 a wider mode if we have a paradoxical SUBREG. find_reloads will
4674 force a reload in that case. So we should not do anything here. */
4676 if (regno >= FIRST_PSEUDO_REGISTER
4677 #ifdef LOAD_EXTEND_OP
4678 && (GET_MODE_SIZE (GET_MODE (x))
4679 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4680 #endif
4681 && (reg_equiv_address[regno] != 0
4682 || (reg_equiv_mem[regno] != 0
4683 && (! strict_memory_address_p (GET_MODE (x),
4684 XEXP (reg_equiv_mem[regno], 0))
4685 || ! offsettable_memref_p (reg_equiv_mem[regno])
4686 || num_not_at_initial_offset))))
4687 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4688 insn);
4691 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4693 if (fmt[i] == 'e')
4695 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4696 ind_levels, is_set_dest, insn,
4697 address_reloaded);
4698 /* If we have replaced a reg with its equivalent memory loc -
4699 that can still be handled here, e.g. if it's in a paradoxical
4700 subreg - we must make the change in a copy, rather than using
4701 a destructive change. This way, find_reloads can still elect
4702 not to do the change. */
4703 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4705 x = shallow_copy_rtx (x);
4706 copied = 1;
4708 XEXP (x, i) = new_part;
4711 return x;
4714 /* Return a mem ref for the memory equivalent of reg REGNO.
4715 This mem ref is not shared with anything. */
4717 static rtx
4718 make_memloc (rtx ad, int regno)
4720 /* We must rerun eliminate_regs, in case the elimination
4721 offsets have changed. */
4722 rtx tem
4723 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], 0, NULL_RTX), 0);
4725 /* If TEM might contain a pseudo, we must copy it to avoid
4726 modifying it when we do the substitution for the reload. */
4727 if (rtx_varies_p (tem, 0))
4728 tem = copy_rtx (tem);
4730 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4731 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4733 /* Copy the result if it's still the same as the equivalence, to avoid
4734 modifying it when we do the substitution for the reload. */
4735 if (tem == reg_equiv_memory_loc[regno])
4736 tem = copy_rtx (tem);
4737 return tem;
4740 /* Returns true if AD could be turned into a valid memory reference
4741 to mode MODE by reloading the part pointed to by PART into a
4742 register. */
4744 static int
4745 maybe_memory_address_p (enum machine_mode mode, rtx ad, rtx *part)
4747 int retv;
4748 rtx tem = *part;
4749 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4751 *part = reg;
4752 retv = memory_address_p (mode, ad);
4753 *part = tem;
4755 return retv;
4758 /* Record all reloads needed for handling memory address AD
4759 which appears in *LOC in a memory reference to mode MODE
4760 which itself is found in location *MEMREFLOC.
4761 Note that we take shortcuts assuming that no multi-reg machine mode
4762 occurs as part of an address.
4764 OPNUM and TYPE specify the purpose of this reload.
4766 IND_LEVELS says how many levels of indirect addressing this machine
4767 supports.
4769 INSN, if nonzero, is the insn in which we do the reload. It is used
4770 to determine if we may generate output reloads, and where to put USEs
4771 for pseudos that we have to replace with stack slots.
4773 Value is one if this address is reloaded or replaced as a whole; it is
4774 zero if the top level of this address was not reloaded or replaced, and
4775 it is -1 if it may or may not have been reloaded or replaced.
4777 Note that there is no verification that the address will be valid after
4778 this routine does its work. Instead, we rely on the fact that the address
4779 was valid when reload started. So we need only undo things that reload
4780 could have broken. These are wrong register types, pseudos not allocated
4781 to a hard register, and frame pointer elimination. */
4783 static int
4784 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4785 rtx *loc, int opnum, enum reload_type type,
4786 int ind_levels, rtx insn)
4788 int regno;
4789 int removed_and = 0;
4790 int op_index;
4791 rtx tem;
4793 /* If the address is a register, see if it is a legitimate address and
4794 reload if not. We first handle the cases where we need not reload
4795 or where we must reload in a non-standard way. */
4797 if (REG_P (ad))
4799 regno = REGNO (ad);
4801 if (reg_equiv_constant[regno] != 0)
4803 find_reloads_address_part (reg_equiv_constant[regno], loc,
4804 base_reg_class (mode, MEM, SCRATCH),
4805 GET_MODE (ad), opnum, type, ind_levels);
4806 return 1;
4809 tem = reg_equiv_memory_loc[regno];
4810 if (tem != 0)
4812 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4814 tem = make_memloc (ad, regno);
4815 if (! strict_memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
4817 rtx orig = tem;
4819 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4820 &XEXP (tem, 0), opnum,
4821 ADDR_TYPE (type), ind_levels, insn);
4822 if (!rtx_equal_p (tem, orig))
4823 push_reg_equiv_alt_mem (regno, tem);
4825 /* We can avoid a reload if the register's equivalent memory
4826 expression is valid as an indirect memory address.
4827 But not all addresses are valid in a mem used as an indirect
4828 address: only reg or reg+constant. */
4830 if (ind_levels > 0
4831 && strict_memory_address_p (mode, tem)
4832 && (REG_P (XEXP (tem, 0))
4833 || (GET_CODE (XEXP (tem, 0)) == PLUS
4834 && REG_P (XEXP (XEXP (tem, 0), 0))
4835 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4837 /* If TEM is not the same as what we'll be replacing the
4838 pseudo with after reload, put a USE in front of INSN
4839 in the final reload pass. */
4840 if (replace_reloads
4841 && num_not_at_initial_offset
4842 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4844 *loc = tem;
4845 /* We mark the USE with QImode so that we
4846 recognize it as one that can be safely
4847 deleted at the end of reload. */
4848 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4849 insn), QImode);
4851 /* This doesn't really count as replacing the address
4852 as a whole, since it is still a memory access. */
4854 return 0;
4856 ad = tem;
4860 /* The only remaining case where we can avoid a reload is if this is a
4861 hard register that is valid as a base register and which is not the
4862 subject of a CLOBBER in this insn. */
4864 else if (regno < FIRST_PSEUDO_REGISTER
4865 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4866 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4867 return 0;
4869 /* If we do not have one of the cases above, we must do the reload. */
4870 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4871 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4872 return 1;
4875 if (strict_memory_address_p (mode, ad))
4877 /* The address appears valid, so reloads are not needed.
4878 But the address may contain an eliminable register.
4879 This can happen because a machine with indirect addressing
4880 may consider a pseudo register by itself a valid address even when
4881 it has failed to get a hard reg.
4882 So do a tree-walk to find and eliminate all such regs. */
4884 /* But first quickly dispose of a common case. */
4885 if (GET_CODE (ad) == PLUS
4886 && GET_CODE (XEXP (ad, 1)) == CONST_INT
4887 && REG_P (XEXP (ad, 0))
4888 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4889 return 0;
4891 subst_reg_equivs_changed = 0;
4892 *loc = subst_reg_equivs (ad, insn);
4894 if (! subst_reg_equivs_changed)
4895 return 0;
4897 /* Check result for validity after substitution. */
4898 if (strict_memory_address_p (mode, ad))
4899 return 0;
4902 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4905 if (memrefloc)
4907 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4908 ind_levels, win);
4910 break;
4911 win:
4912 *memrefloc = copy_rtx (*memrefloc);
4913 XEXP (*memrefloc, 0) = ad;
4914 move_replacements (&ad, &XEXP (*memrefloc, 0));
4915 return -1;
4917 while (0);
4918 #endif
4920 /* The address is not valid. We have to figure out why. First see if
4921 we have an outer AND and remove it if so. Then analyze what's inside. */
4923 if (GET_CODE (ad) == AND)
4925 removed_and = 1;
4926 loc = &XEXP (ad, 0);
4927 ad = *loc;
4930 /* One possibility for why the address is invalid is that it is itself
4931 a MEM. This can happen when the frame pointer is being eliminated, a
4932 pseudo is not allocated to a hard register, and the offset between the
4933 frame and stack pointers is not its initial value. In that case the
4934 pseudo will have been replaced by a MEM referring to the
4935 stack pointer. */
4936 if (MEM_P (ad))
4938 /* First ensure that the address in this MEM is valid. Then, unless
4939 indirect addresses are valid, reload the MEM into a register. */
4940 tem = ad;
4941 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
4942 opnum, ADDR_TYPE (type),
4943 ind_levels == 0 ? 0 : ind_levels - 1, insn);
4945 /* If tem was changed, then we must create a new memory reference to
4946 hold it and store it back into memrefloc. */
4947 if (tem != ad && memrefloc)
4949 *memrefloc = copy_rtx (*memrefloc);
4950 copy_replacements (tem, XEXP (*memrefloc, 0));
4951 loc = &XEXP (*memrefloc, 0);
4952 if (removed_and)
4953 loc = &XEXP (*loc, 0);
4956 /* Check similar cases as for indirect addresses as above except
4957 that we can allow pseudos and a MEM since they should have been
4958 taken care of above. */
4960 if (ind_levels == 0
4961 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
4962 || MEM_P (XEXP (tem, 0))
4963 || ! (REG_P (XEXP (tem, 0))
4964 || (GET_CODE (XEXP (tem, 0)) == PLUS
4965 && REG_P (XEXP (XEXP (tem, 0), 0))
4966 && GET_CODE (XEXP (XEXP (tem, 0), 1)) == CONST_INT)))
4968 /* Must use TEM here, not AD, since it is the one that will
4969 have any subexpressions reloaded, if needed. */
4970 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
4971 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
4972 VOIDmode, 0,
4973 0, opnum, type);
4974 return ! removed_and;
4976 else
4977 return 0;
4980 /* If we have the address of a stack slot but it's not valid because the
4981 displacement is too large, compute the sum in a register.
4982 Handle all base registers here, not just fp/ap/sp, because on some
4983 targets (namely SH) we can also get too large displacements from
4984 big-endian corrections. */
4985 else if (GET_CODE (ad) == PLUS
4986 && REG_P (XEXP (ad, 0))
4987 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
4988 && GET_CODE (XEXP (ad, 1)) == CONST_INT
4989 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
4990 CONST_INT))
4993 /* Unshare the MEM rtx so we can safely alter it. */
4994 if (memrefloc)
4996 *memrefloc = copy_rtx (*memrefloc);
4997 loc = &XEXP (*memrefloc, 0);
4998 if (removed_and)
4999 loc = &XEXP (*loc, 0);
5002 if (double_reg_address_ok)
5004 /* Unshare the sum as well. */
5005 *loc = ad = copy_rtx (ad);
5007 /* Reload the displacement into an index reg.
5008 We assume the frame pointer or arg pointer is a base reg. */
5009 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5010 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5011 type, ind_levels);
5012 return 0;
5014 else
5016 /* If the sum of two regs is not necessarily valid,
5017 reload the sum into a base reg.
5018 That will at least work. */
5019 find_reloads_address_part (ad, loc,
5020 base_reg_class (mode, MEM, SCRATCH),
5021 Pmode, opnum, type, ind_levels);
5023 return ! removed_and;
5026 /* If we have an indexed stack slot, there are three possible reasons why
5027 it might be invalid: The index might need to be reloaded, the address
5028 might have been made by frame pointer elimination and hence have a
5029 constant out of range, or both reasons might apply.
5031 We can easily check for an index needing reload, but even if that is the
5032 case, we might also have an invalid constant. To avoid making the
5033 conservative assumption and requiring two reloads, we see if this address
5034 is valid when not interpreted strictly. If it is, the only problem is
5035 that the index needs a reload and find_reloads_address_1 will take care
5036 of it.
5038 Handle all base registers here, not just fp/ap/sp, because on some
5039 targets (namely SPARC) we can also get invalid addresses from preventive
5040 subreg big-endian corrections made by find_reloads_toplev. We
5041 can also get expressions involving LO_SUM (rather than PLUS) from
5042 find_reloads_subreg_address.
5044 If we decide to do something, it must be that `double_reg_address_ok'
5045 is true. We generate a reload of the base register + constant and
5046 rework the sum so that the reload register will be added to the index.
5047 This is safe because we know the address isn't shared.
5049 We check for the base register as both the first and second operand of
5050 the innermost PLUS and/or LO_SUM. */
5052 for (op_index = 0; op_index < 2; ++op_index)
5054 rtx operand, addend;
5055 enum rtx_code inner_code;
5057 if (GET_CODE (ad) != PLUS)
5058 continue;
5060 inner_code = GET_CODE (XEXP (ad, 0));
5061 if (!(GET_CODE (ad) == PLUS
5062 && GET_CODE (XEXP (ad, 1)) == CONST_INT
5063 && (inner_code == PLUS || inner_code == LO_SUM)))
5064 continue;
5066 operand = XEXP (XEXP (ad, 0), op_index);
5067 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5068 continue;
5070 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5072 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5073 GET_CODE (addend))
5074 || operand == frame_pointer_rtx
5075 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5076 || operand == hard_frame_pointer_rtx
5077 #endif
5078 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5079 || operand == arg_pointer_rtx
5080 #endif
5081 || operand == stack_pointer_rtx)
5082 && ! maybe_memory_address_p (mode, ad,
5083 &XEXP (XEXP (ad, 0), 1 - op_index)))
5085 rtx offset_reg;
5086 enum reg_class cls;
5088 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5090 /* Form the adjusted address. */
5091 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5092 ad = gen_rtx_PLUS (GET_MODE (ad),
5093 op_index == 0 ? offset_reg : addend,
5094 op_index == 0 ? addend : offset_reg);
5095 else
5096 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5097 op_index == 0 ? offset_reg : addend,
5098 op_index == 0 ? addend : offset_reg);
5099 *loc = ad;
5101 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5102 find_reloads_address_part (XEXP (ad, op_index),
5103 &XEXP (ad, op_index), cls,
5104 GET_MODE (ad), opnum, type, ind_levels);
5105 find_reloads_address_1 (mode,
5106 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5107 GET_CODE (XEXP (ad, op_index)),
5108 &XEXP (ad, 1 - op_index), opnum,
5109 type, 0, insn);
5111 return 0;
5115 /* See if address becomes valid when an eliminable register
5116 in a sum is replaced. */
5118 tem = ad;
5119 if (GET_CODE (ad) == PLUS)
5120 tem = subst_indexed_address (ad);
5121 if (tem != ad && strict_memory_address_p (mode, tem))
5123 /* Ok, we win that way. Replace any additional eliminable
5124 registers. */
5126 subst_reg_equivs_changed = 0;
5127 tem = subst_reg_equivs (tem, insn);
5129 /* Make sure that didn't make the address invalid again. */
5131 if (! subst_reg_equivs_changed || strict_memory_address_p (mode, tem))
5133 *loc = tem;
5134 return 0;
5138 /* If constants aren't valid addresses, reload the constant address
5139 into a register. */
5140 if (CONSTANT_P (ad) && ! strict_memory_address_p (mode, ad))
5142 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5143 Unshare it so we can safely alter it. */
5144 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5145 && CONSTANT_POOL_ADDRESS_P (ad))
5147 *memrefloc = copy_rtx (*memrefloc);
5148 loc = &XEXP (*memrefloc, 0);
5149 if (removed_and)
5150 loc = &XEXP (*loc, 0);
5153 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5154 Pmode, opnum, type, ind_levels);
5155 return ! removed_and;
5158 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5159 ind_levels, insn);
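/* A minimal sketch (not from the original sources) of the "base register
   plus out-of-range displacement" case handled above; the displacement is
   made up.  After frame pointer elimination an address can look like
   (plus (reg sp) (const_int 100000)).  If the target rejects that
   displacement but double_reg_address_ok allows reg+reg, only the
   CONST_INT is reloaded into an index register, leaving
   (plus (reg sp) (reg index)); otherwise the whole sum is reloaded into
   a single base register.  */
#if 0
{
  rtx ad = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (100000));
  /* Reload only the displacement into an index register, as the
     double_reg_address_ok branch above does.  */
  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1), INDEX_REG_CLASS,
			     GET_MODE (ad), /* opnum */ 0, RELOAD_OTHER,
			     /* ind_levels */ 0);
}
#endif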
5162 /* Find all pseudo regs appearing in AD
5163 that are eliminable in favor of equivalent values
5164 and do not have hard regs; replace them by their equivalents.
5165 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5166 front of it for pseudos that we have to replace with stack slots. */
5168 static rtx
5169 subst_reg_equivs (rtx ad, rtx insn)
5171 RTX_CODE code = GET_CODE (ad);
5172 int i;
5173 const char *fmt;
5175 switch (code)
5177 case HIGH:
5178 case CONST_INT:
5179 case CONST:
5180 case CONST_DOUBLE:
5181 case CONST_FIXED:
5182 case CONST_VECTOR:
5183 case SYMBOL_REF:
5184 case LABEL_REF:
5185 case PC:
5186 case CC0:
5187 return ad;
5189 case REG:
5191 int regno = REGNO (ad);
5193 if (reg_equiv_constant[regno] != 0)
5195 subst_reg_equivs_changed = 1;
5196 return reg_equiv_constant[regno];
5198 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5200 rtx mem = make_memloc (ad, regno);
5201 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5203 subst_reg_equivs_changed = 1;
5204 /* We mark the USE with QImode so that we recognize it
5205 as one that can be safely deleted at the end of
5206 reload. */
5207 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5208 QImode);
5209 return mem;
5213 return ad;
5215 case PLUS:
5216 /* Quickly dispose of a common case. */
5217 if (XEXP (ad, 0) == frame_pointer_rtx
5218 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
5219 return ad;
5220 break;
5222 default:
5223 break;
5226 fmt = GET_RTX_FORMAT (code);
5227 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5228 if (fmt[i] == 'e')
5229 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5230 return ad;
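/* Illustrative sketch of subst_reg_equivs; pseudo 400 and its
   equivalence are hypothetical.  Assuming reg_equiv_constant[400] is
   (const_int 42) and pseudo 400 got no hard register, the walk above
   turns (plus (reg 400) (reg 401)) into (plus (const_int 42) (reg 401))
   and sets subst_reg_equivs_changed so the caller re-validates the
   address.  */
#if 0
{
  rtx ad = gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 400),
			 gen_rtx_REG (SImode, 401));
  subst_reg_equivs_changed = 0;
  ad = subst_reg_equivs (ad, this_insn);
}
#endif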
5233 /* Compute the sum of X and Y, making canonicalizations assumed in an
5234 address, namely: sum constant integers, surround the sum of two
5235 constants with a CONST, put the constant as the second operand, and
5236 group the constant on the outermost sum.
5238 This routine assumes both inputs are already in canonical form. */
5241 form_sum (rtx x, rtx y)
5243 rtx tem;
5244 enum machine_mode mode = GET_MODE (x);
5246 if (mode == VOIDmode)
5247 mode = GET_MODE (y);
5249 if (mode == VOIDmode)
5250 mode = Pmode;
5252 if (GET_CODE (x) == CONST_INT)
5253 return plus_constant (y, INTVAL (x));
5254 else if (GET_CODE (y) == CONST_INT)
5255 return plus_constant (x, INTVAL (y));
5256 else if (CONSTANT_P (x))
5257 tem = x, x = y, y = tem;
5259 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5260 return form_sum (XEXP (x, 0), form_sum (XEXP (x, 1), y));
5262 /* Note that if the operands of Y are specified in the opposite
5263 order in the recursive calls below, infinite recursion will occur. */
5264 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5265 return form_sum (form_sum (x, XEXP (y, 0)), XEXP (y, 1));
5267 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5268 constant will have been placed second. */
5269 if (CONSTANT_P (x) && CONSTANT_P (y))
5271 if (GET_CODE (x) == CONST)
5272 x = XEXP (x, 0);
5273 if (GET_CODE (y) == CONST)
5274 y = XEXP (y, 0);
5276 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5279 return gen_rtx_PLUS (mode, x, y);
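/* A small usage sketch for form_sum; the register number and the
   constants are arbitrary.  */
#if 0
{
  rtx r = gen_rtx_REG (Pmode, 1);
  /* Constants are folded and kept outermost:
       (plus (reg 1) (const_int 12))  */
  rtx a = form_sum (gen_rtx_PLUS (Pmode, r, GEN_INT (4)), GEN_INT (8));
  /* The sum of two constants is wrapped in a CONST:
       (const (plus (symbol_ref "x") (const_int 8)))  */
  rtx b = form_sum (gen_rtx_SYMBOL_REF (Pmode, "x"), GEN_INT (8));
}
#endif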
5282 /* If ADDR is a sum containing a pseudo register that should be
5283 replaced with a constant (from reg_equiv_constant),
5284 return the result of doing so, and also apply the associative
5285 law so that the result is more likely to be a valid address.
5286 (But it is not guaranteed to be one.)
5288 Note that at most one register is replaced, even if more are
5289 replaceable. Also, we try to put the result into a canonical form
5290 so it is more likely to be a valid address.
5292 In all other cases, return ADDR. */
5294 static rtx
5295 subst_indexed_address (rtx addr)
5297 rtx op0 = 0, op1 = 0, op2 = 0;
5298 rtx tem;
5299 int regno;
5301 if (GET_CODE (addr) == PLUS)
5303 /* Try to find a register to replace. */
5304 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5305 if (REG_P (op0)
5306 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5307 && reg_renumber[regno] < 0
5308 && reg_equiv_constant[regno] != 0)
5309 op0 = reg_equiv_constant[regno];
5310 else if (REG_P (op1)
5311 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5312 && reg_renumber[regno] < 0
5313 && reg_equiv_constant[regno] != 0)
5314 op1 = reg_equiv_constant[regno];
5315 else if (GET_CODE (op0) == PLUS
5316 && (tem = subst_indexed_address (op0)) != op0)
5317 op0 = tem;
5318 else if (GET_CODE (op1) == PLUS
5319 && (tem = subst_indexed_address (op1)) != op1)
5320 op1 = tem;
5321 else
5322 return addr;
5324 /* Pick out up to three things to add. */
5325 if (GET_CODE (op1) == PLUS)
5326 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5327 else if (GET_CODE (op0) == PLUS)
5328 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5330 /* Compute the sum. */
5331 if (op2 != 0)
5332 op1 = form_sum (op1, op2);
5333 if (op1 != 0)
5334 op0 = form_sum (op0, op1);
5336 return op0;
5338 return addr;
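/* Illustrative sketch for subst_indexed_address, assuming pseudo 400 got
   no hard register and reg_equiv_constant[400] is (symbol_ref "x"); the
   register numbers are made up.  The address
     (plus (plus (reg 400) (reg 1)) (const_int 8))
   becomes, after the replacement and reassociation above,
     (plus (reg 1) (const (plus (symbol_ref "x") (const_int 8))))
   which is more likely to be a valid address.  */
#if 0
{
  rtx addr = gen_rtx_PLUS (Pmode,
			   gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, 400),
					 gen_rtx_REG (Pmode, 1)),
			   GEN_INT (8));
  addr = subst_indexed_address (addr);
}
#endif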
5341 /* Update the REG_INC notes for an insn. It updates all REG_INC
5342 notes for the instruction that refer to REGNO, to refer
5343 to the reload number.
5345 INSN is the insn for which any REG_INC notes need updating.
5347 REGNO is the register number which has been reloaded.
5349 RELOADNUM is the reload number. */
5351 static void
5352 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5353 int reloadnum ATTRIBUTE_UNUSED)
5355 #ifdef AUTO_INC_DEC
5356 rtx link;
5358 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5359 if (REG_NOTE_KIND (link) == REG_INC
5360 && (int) REGNO (XEXP (link, 0)) == regno)
5361 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5362 #endif
5365 /* Record the pseudo registers we must reload into hard registers in a
5366 subexpression of a would-be memory address, X referring to a value
5367 in mode MODE. (This function is not called if the address we find
5368 is strictly valid.)
5370 CONTEXT = 1 means we are considering regs as index regs,
5371 = 0 means we are considering them as base regs.
5372 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5373 or an autoinc code.
5374 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5375 is the code of the index part of the address. Otherwise, pass SCRATCH
5376 for this argument.
5377 OPNUM and TYPE specify the purpose of any reloads made.
5379 IND_LEVELS says how many levels of indirect addressing are
5380 supported at this point in the address.
5382 INSN, if nonzero, is the insn in which we do the reload. It is used
5383 to determine if we may generate output reloads.
5385 We return nonzero if X, as a whole, is reloaded or replaced. */
5387 /* Note that we take shortcuts assuming that no multi-reg machine mode
5388 occurs as part of an address.
5389 Also, this is not fully machine-customizable; it works for machines
5390 such as VAXen and 68000's and 32000's, but other possible machines
5391 could have addressing modes that this does not handle right.
5392 If you add push_reload calls here, you need to make sure gen_reload
5393 handles those cases gracefully. */
5395 static int
5396 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5397 enum rtx_code outer_code, enum rtx_code index_code,
5398 rtx *loc, int opnum, enum reload_type type,
5399 int ind_levels, rtx insn)
5401 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5402 ((CONTEXT) == 0 \
5403 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5404 : REGNO_OK_FOR_INDEX_P (REGNO))
5406 enum reg_class context_reg_class;
5407 RTX_CODE code = GET_CODE (x);
5409 if (context == 1)
5410 context_reg_class = INDEX_REG_CLASS;
5411 else
5412 context_reg_class = base_reg_class (mode, outer_code, index_code);
5414 switch (code)
5416 case PLUS:
5418 rtx orig_op0 = XEXP (x, 0);
5419 rtx orig_op1 = XEXP (x, 1);
5420 RTX_CODE code0 = GET_CODE (orig_op0);
5421 RTX_CODE code1 = GET_CODE (orig_op1);
5422 rtx op0 = orig_op0;
5423 rtx op1 = orig_op1;
5425 if (GET_CODE (op0) == SUBREG)
5427 op0 = SUBREG_REG (op0);
5428 code0 = GET_CODE (op0);
5429 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5430 op0 = gen_rtx_REG (word_mode,
5431 (REGNO (op0) +
5432 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5433 GET_MODE (SUBREG_REG (orig_op0)),
5434 SUBREG_BYTE (orig_op0),
5435 GET_MODE (orig_op0))));
5438 if (GET_CODE (op1) == SUBREG)
5440 op1 = SUBREG_REG (op1);
5441 code1 = GET_CODE (op1);
5442 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5443 /* ??? Why is this given op1's mode and above for
5444 ??? op0 SUBREGs we use word_mode? */
5445 op1 = gen_rtx_REG (GET_MODE (op1),
5446 (REGNO (op1) +
5447 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5448 GET_MODE (SUBREG_REG (orig_op1)),
5449 SUBREG_BYTE (orig_op1),
5450 GET_MODE (orig_op1))));
5452 /* A PLUS in the index position may be created only as a result of
5453 register rematerialization for an expression like &localvar*4.  Reload it.
5454 It may be possible to combine the displacement on the outer level,
5455 but it is probably not worthwhile to do so. */
5456 if (context == 1)
5458 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5459 opnum, ADDR_TYPE (type), ind_levels, insn);
5460 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5461 context_reg_class,
5462 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5463 return 1;
5466 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5467 || code0 == ZERO_EXTEND || code1 == MEM)
5469 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5470 &XEXP (x, 0), opnum, type, ind_levels,
5471 insn);
5472 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5473 &XEXP (x, 1), opnum, type, ind_levels,
5474 insn);
5477 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5478 || code1 == ZERO_EXTEND || code0 == MEM)
5480 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5481 &XEXP (x, 0), opnum, type, ind_levels,
5482 insn);
5483 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5484 &XEXP (x, 1), opnum, type, ind_levels,
5485 insn);
5488 else if (code0 == CONST_INT || code0 == CONST
5489 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5490 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5491 &XEXP (x, 1), opnum, type, ind_levels,
5492 insn);
5494 else if (code1 == CONST_INT || code1 == CONST
5495 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5496 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5497 &XEXP (x, 0), opnum, type, ind_levels,
5498 insn);
5500 else if (code0 == REG && code1 == REG)
5502 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5503 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5504 return 0;
5505 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5506 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5507 return 0;
5508 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5509 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5510 &XEXP (x, 1), opnum, type, ind_levels,
5511 insn);
5512 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5513 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5514 &XEXP (x, 0), opnum, type, ind_levels,
5515 insn);
5516 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5517 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5518 &XEXP (x, 0), opnum, type, ind_levels,
5519 insn);
5520 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5521 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5522 &XEXP (x, 1), opnum, type, ind_levels,
5523 insn);
5524 else
5526 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5527 &XEXP (x, 0), opnum, type, ind_levels,
5528 insn);
5529 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5530 &XEXP (x, 1), opnum, type, ind_levels,
5531 insn);
5535 else if (code0 == REG)
5537 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5538 &XEXP (x, 0), opnum, type, ind_levels,
5539 insn);
5540 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5541 &XEXP (x, 1), opnum, type, ind_levels,
5542 insn);
5545 else if (code1 == REG)
5547 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5548 &XEXP (x, 1), opnum, type, ind_levels,
5549 insn);
5550 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5551 &XEXP (x, 0), opnum, type, ind_levels,
5552 insn);
5556 return 0;
5558 case POST_MODIFY:
5559 case PRE_MODIFY:
5561 rtx op0 = XEXP (x, 0);
5562 rtx op1 = XEXP (x, 1);
5563 enum rtx_code index_code;
5564 int regno;
5565 int reloadnum;
5567 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5568 return 0;
5570 /* Currently, we only support {PRE,POST}_MODIFY constructs
5571 where a base register is {inc,dec}remented by the contents
5572 of another register or by a constant value. Thus, these
5573 operands must match. */
5574 gcc_assert (op0 == XEXP (op1, 0));
5576 /* Require index register (or constant). Let's just handle the
5577 register case in the meantime... If the target allows
5578 auto-modify by a constant then we could try replacing a pseudo
5579 register with its equivalent constant where applicable.
5581 We also handle the case where the register was eliminated
5582 resulting in a PLUS subexpression.
5584 If we later decide to reload the whole PRE_MODIFY or
5585 POST_MODIFY, inc_for_reload might clobber the reload register
5586 before reading the index. The index register might therefore
5587 need to live longer than a TYPE reload normally would, so be
5588 conservative and class it as RELOAD_OTHER. */
5589 if ((REG_P (XEXP (op1, 1))
5590 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5591 || GET_CODE (XEXP (op1, 1)) == PLUS)
5592 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5593 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5594 ind_levels, insn);
5596 gcc_assert (REG_P (XEXP (op1, 0)));
5598 regno = REGNO (XEXP (op1, 0));
5599 index_code = GET_CODE (XEXP (op1, 1));
5601 /* A register that is incremented cannot be constant! */
5602 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5603 || reg_equiv_constant[regno] == 0);
5605 /* Handle a register that is equivalent to a memory location
5606 which cannot be addressed directly. */
5607 if (reg_equiv_memory_loc[regno] != 0
5608 && (reg_equiv_address[regno] != 0
5609 || num_not_at_initial_offset))
5611 rtx tem = make_memloc (XEXP (x, 0), regno);
5613 if (reg_equiv_address[regno]
5614 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5616 rtx orig = tem;
5618 /* First reload the memory location's address.
5619 We can't use ADDR_TYPE (type) here, because we need to
5620 write back the value after reading it, hence we actually
5621 need two registers. */
5622 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5623 &XEXP (tem, 0), opnum,
5624 RELOAD_OTHER,
5625 ind_levels, insn);
5627 if (!rtx_equal_p (tem, orig))
5628 push_reg_equiv_alt_mem (regno, tem);
5630 /* Then reload the memory location into a base
5631 register. */
5632 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5633 &XEXP (op1, 0),
5634 base_reg_class (mode, code,
5635 index_code),
5636 GET_MODE (x), GET_MODE (x), 0,
5637 0, opnum, RELOAD_OTHER);
5639 update_auto_inc_notes (this_insn, regno, reloadnum);
5640 return 0;
5644 if (reg_renumber[regno] >= 0)
5645 regno = reg_renumber[regno];
5647 /* We require a base register here... */
5648 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5650 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5651 &XEXP (op1, 0), &XEXP (x, 0),
5652 base_reg_class (mode, code, index_code),
5653 GET_MODE (x), GET_MODE (x), 0, 0,
5654 opnum, RELOAD_OTHER);
5656 update_auto_inc_notes (this_insn, regno, reloadnum);
5657 return 0;
5660 return 0;
5662 case POST_INC:
5663 case POST_DEC:
5664 case PRE_INC:
5665 case PRE_DEC:
5666 if (REG_P (XEXP (x, 0)))
5668 int regno = REGNO (XEXP (x, 0));
5669 int value = 0;
5670 rtx x_orig = x;
5672 /* A register that is incremented cannot be constant! */
5673 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5674 || reg_equiv_constant[regno] == 0);
5676 /* Handle a register that is equivalent to a memory location
5677 which cannot be addressed directly. */
5678 if (reg_equiv_memory_loc[regno] != 0
5679 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5681 rtx tem = make_memloc (XEXP (x, 0), regno);
5682 if (reg_equiv_address[regno]
5683 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5685 rtx orig = tem;
5687 /* First reload the memory location's address.
5688 We can't use ADDR_TYPE (type) here, because we need to
5689 write back the value after reading it, hence we actually
5690 need two registers. */
5691 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5692 &XEXP (tem, 0), opnum, type,
5693 ind_levels, insn);
5694 if (!rtx_equal_p (tem, orig))
5695 push_reg_equiv_alt_mem (regno, tem);
5696 /* Put this inside a new increment-expression. */
5697 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5698 /* Proceed to reload that, as if it contained a register. */
5702 /* If we have a hard register that is ok in this incdec context,
5703 don't make a reload. If the register isn't nice enough for
5704 autoincdec, we can reload it.  But if an autoincrement of a
5705 register that we have verified here as acceptable is still not
5706 "valid" in the surrounding context, then no autoincrement is "valid".
5707 If that is true and something made an autoincrement anyway,
5708 this must be a special context where one is allowed.
5709 (For example, a "push" instruction.)
5710 We can't improve this address, so leave it alone. */
5712 /* Otherwise, reload the autoincrement into a suitable hard reg
5713 and record how much to increment by. */
5715 if (reg_renumber[regno] >= 0)
5716 regno = reg_renumber[regno];
5717 if (regno >= FIRST_PSEUDO_REGISTER
5718 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5719 index_code))
5721 int reloadnum;
5723 /* If we can output the register afterwards, do so; this
5724 saves the extra update.
5725 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5726 CALL_INSN - and it does not set CC0.
5727 But don't do this if we cannot directly address the
5728 memory location, since this will make it harder to
5729 reuse address reloads, and increases register pressure.
5730 Also don't do this if we can probably update x directly. */
5731 rtx equiv = (MEM_P (XEXP (x, 0))
5732 ? XEXP (x, 0)
5733 : reg_equiv_mem[regno]);
5734 int icode = (int) optab_handler (add_optab, Pmode)->insn_code;
5735 if (insn && NONJUMP_INSN_P (insn) && equiv
5736 && memory_operand (equiv, GET_MODE (equiv))
5737 #ifdef HAVE_cc0
5738 && ! sets_cc0_p (PATTERN (insn))
5739 #endif
5740 && ! (icode != CODE_FOR_nothing
5741 && ((*insn_data[icode].operand[0].predicate)
5742 (equiv, Pmode))
5743 && ((*insn_data[icode].operand[1].predicate)
5744 (equiv, Pmode))))
5746 /* We use the original pseudo for loc, so that
5747 emit_reload_insns() knows which pseudo this
5748 reload refers to and updates the pseudo rtx, not
5749 its equivalent memory location, as well as the
5750 corresponding entry in reg_last_reload_reg. */
5751 loc = &XEXP (x_orig, 0);
5752 x = XEXP (x, 0);
5753 reloadnum
5754 = push_reload (x, x, loc, loc,
5755 context_reg_class,
5756 GET_MODE (x), GET_MODE (x), 0, 0,
5757 opnum, RELOAD_OTHER);
5759 else
5761 reloadnum
5762 = push_reload (x, x, loc, (rtx*) 0,
5763 context_reg_class,
5764 GET_MODE (x), GET_MODE (x), 0, 0,
5765 opnum, type);
5766 rld[reloadnum].inc
5767 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5769 value = 1;
5772 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5773 reloadnum);
5775 return value;
5777 return 0;
5779 case TRUNCATE:
5780 case SIGN_EXTEND:
5781 case ZERO_EXTEND:
5782 /* Look for parts to reload in the inner expression and reload them
5783 too, in addition to this operation. Reloading all inner parts in
5784 addition to this one shouldn't be necessary, but at this point,
5785 we don't know if we can possibly omit any part that *can* be
5786 reloaded. Targets that are better off reloading just either part
5787 (or perhaps even a different part of an outer expression), should
5788 define LEGITIMIZE_RELOAD_ADDRESS. */
5789 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5790 context, code, SCRATCH, &XEXP (x, 0), opnum,
5791 type, ind_levels, insn);
5792 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5793 context_reg_class,
5794 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5795 return 1;
5797 case MEM:
5798 /* This is probably the result of a substitution, by eliminate_regs, of
5799 an equivalent address for a pseudo that was not allocated to a hard
5800 register. Verify that the specified address is valid and reload it
5801 into a register.
5803 Since we know we are going to reload this item, don't decrement for
5804 the indirection level.
5806 Note that this is actually conservative: it would be slightly more
5807 efficient to use the value of SPILL_INDIRECT_LEVELS from
5808 reload1.c here. */
5810 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5811 opnum, ADDR_TYPE (type), ind_levels, insn);
5812 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5813 context_reg_class,
5814 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5815 return 1;
5817 case REG:
5819 int regno = REGNO (x);
5821 if (reg_equiv_constant[regno] != 0)
5823 find_reloads_address_part (reg_equiv_constant[regno], loc,
5824 context_reg_class,
5825 GET_MODE (x), opnum, type, ind_levels);
5826 return 1;
5829 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5830 that feeds this insn. */
5831 if (reg_equiv_mem[regno] != 0)
5833 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5834 context_reg_class,
5835 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5836 return 1;
5838 #endif
5840 if (reg_equiv_memory_loc[regno]
5841 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5843 rtx tem = make_memloc (x, regno);
5844 if (reg_equiv_address[regno] != 0
5845 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5847 x = tem;
5848 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5849 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5850 ind_levels, insn);
5851 if (!rtx_equal_p (x, tem))
5852 push_reg_equiv_alt_mem (regno, x);
5856 if (reg_renumber[regno] >= 0)
5857 regno = reg_renumber[regno];
5859 if (regno >= FIRST_PSEUDO_REGISTER
5860 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5861 index_code))
5863 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5864 context_reg_class,
5865 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5866 return 1;
5869 /* If a register appearing in an address is the subject of a CLOBBER
5870 in this insn, reload it into some other register to be safe.
5871 The CLOBBER is supposed to make the register unavailable
5872 from before this insn to after it. */
5873 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5875 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5876 context_reg_class,
5877 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5878 return 1;
5881 return 0;
5883 case SUBREG:
5884 if (REG_P (SUBREG_REG (x)))
5886 /* If this is a SUBREG of a hard register and the resulting register
5887 is of the wrong class, reload the whole SUBREG. This avoids
5888 needless copies if SUBREG_REG is multi-word. */
5889 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5891 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5893 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5894 index_code))
5896 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5897 context_reg_class,
5898 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5899 return 1;
5902 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5903 is larger than the class size, then reload the whole SUBREG. */
5904 else
5906 enum reg_class class = context_reg_class;
5907 if ((unsigned) CLASS_MAX_NREGS (class, GET_MODE (SUBREG_REG (x)))
5908 > reg_class_size[class])
5910 x = find_reloads_subreg_address (x, 0, opnum,
5911 ADDR_TYPE (type),
5912 ind_levels, insn);
5913 push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
5914 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5915 return 1;
5919 break;
5921 default:
5922 break;
5926 const char *fmt = GET_RTX_FORMAT (code);
5927 int i;
5929 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5931 if (fmt[i] == 'e')
5932 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
5933 we get here. */
5934 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
5935 &XEXP (x, i), opnum, type, ind_levels, insn);
5939 #undef REG_OK_FOR_CONTEXT
5940 return 0;
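/* Illustrative sketch of how the PLUS case above classifies operands;
   the register numbers are arbitrary.  For
     (plus (mult (reg 2) (const_int 4)) (reg 3))
   the MULT operand is treated as the index part (context 1) and the
   plain register as the base part (context 0); each is reloaded only if
   it is not already suitable for that role.  */
#if 0
{
  rtx ad = gen_rtx_PLUS (Pmode,
			 gen_rtx_MULT (Pmode, gen_rtx_REG (Pmode, 2),
				       GEN_INT (4)),
			 gen_rtx_REG (Pmode, 3));
  find_reloads_address_1 (Pmode, ad, 0, MEM, SCRATCH, &ad, 0,
			  RELOAD_OTHER, 0, NULL_RTX);
}
#endif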
5943 /* X, which is found at *LOC, is a part of an address that needs to be
5944 reloaded into a register of class CLASS. If X is a constant, or if
5945 X is a PLUS that contains a constant, check that the constant is a
5946 legitimate operand and that we are supposed to be able to load
5947 it into the register.
5949 If not, force the constant into memory and reload the MEM instead.
5951 MODE is the mode to use, in case X is an integer constant.
5953 OPNUM and TYPE describe the purpose of any reloads made.
5955 IND_LEVELS says how many levels of indirect addressing this machine
5956 supports. */
5958 static void
5959 find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
5960 enum machine_mode mode, int opnum,
5961 enum reload_type type, int ind_levels)
5963 if (CONSTANT_P (x)
5964 && (! LEGITIMATE_CONSTANT_P (x)
5965 || PREFERRED_RELOAD_CLASS (x, class) == NO_REGS))
5967 x = force_const_mem (mode, x);
5968 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
5969 opnum, type, ind_levels, 0);
5972 else if (GET_CODE (x) == PLUS
5973 && CONSTANT_P (XEXP (x, 1))
5974 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
5975 || PREFERRED_RELOAD_CLASS (XEXP (x, 1), class) == NO_REGS))
5977 rtx tem;
5979 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
5980 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
5981 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
5982 opnum, type, ind_levels, 0);
5985 push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
5986 mode, VOIDmode, 0, 0, opnum, type);
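/* Illustrative sketch for find_reloads_address_part: a constant that the
   target cannot load directly (not LEGITIMATE_CONSTANT_P, or whose
   PREFERRED_RELOAD_CLASS is NO_REGS) is first spilled to the constant
   pool, and the resulting MEM is what gets reloaded into CLASS.  The
   constant value here is arbitrary.  */
#if 0
{
  rtx pool_mem = force_const_mem (SImode, GEN_INT (0x12345678));
  /* pool_mem is (mem (symbol_ref <constant pool entry>)); its address
     may itself need reloading, which is what the find_reloads_address
     call above handles.  */
}
#endif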
5989 /* X, a subreg of a pseudo, is a part of an address that needs to be
5990 reloaded.
5992 If the pseudo is equivalent to a memory location that cannot be directly
5993 addressed, make the necessary address reloads.
5995 If address reloads have been necessary, or if the address is changed
5996 by register elimination, return the rtx of the memory location;
5997 otherwise, return X.
5999 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6000 memory location.
6002 OPNUM and TYPE identify the purpose of the reload.
6004 IND_LEVELS says how many levels of indirect addressing are
6005 supported at this point in the address.
6007 INSN, if nonzero, is the insn in which we do the reload. It is used
6008 to determine where to put USEs for pseudos that we have to replace with
6009 stack slots. */
6011 static rtx
6012 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6013 enum reload_type type, int ind_levels, rtx insn)
6015 int regno = REGNO (SUBREG_REG (x));
6017 if (reg_equiv_memory_loc[regno])
6019 /* If the address is not directly addressable, or if the address is not
6020 offsettable, then it must be replaced. */
6021 if (! force_replace
6022 && (reg_equiv_address[regno]
6023 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6024 force_replace = 1;
6026 if (force_replace || num_not_at_initial_offset)
6028 rtx tem = make_memloc (SUBREG_REG (x), regno);
6030 /* If the address changes because of register elimination, then
6031 it must be replaced. */
6032 if (force_replace
6033 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6035 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6036 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6037 int offset;
6038 rtx orig = tem;
6039 enum machine_mode orig_mode = GET_MODE (orig);
6040 int reloaded;
6042 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6043 hold the correct (negative) byte offset. */
6044 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6045 offset = inner_size - outer_size;
6046 else
6047 offset = SUBREG_BYTE (x);
6049 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6050 PUT_MODE (tem, GET_MODE (x));
6051 if (MEM_OFFSET (tem))
6052 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6054 /* If this was a paradoxical subreg that we replaced, the
6055 resulting memory must be sufficiently aligned to allow
6056 us to widen the mode of the memory. */
6057 if (outer_size > inner_size)
6059 rtx base;
6061 base = XEXP (tem, 0);
6062 if (GET_CODE (base) == PLUS)
6064 if (GET_CODE (XEXP (base, 1)) == CONST_INT
6065 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6066 return x;
6067 base = XEXP (base, 0);
6069 if (!REG_P (base)
6070 || (REGNO_POINTER_ALIGN (REGNO (base))
6071 < outer_size * BITS_PER_UNIT))
6072 return x;
6075 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6076 XEXP (tem, 0), &XEXP (tem, 0),
6077 opnum, type, ind_levels, insn);
6078 /* ??? Do we need to handle nonzero offsets somehow? */
6079 if (!offset && !rtx_equal_p (tem, orig))
6080 push_reg_equiv_alt_mem (regno, tem);
6082 /* For some processors an address may be valid in the
6083 original mode but not in a smaller mode. For
6084 example, ARM accepts a scaled index register in
6085 SImode but not in HImode. find_reloads_address
6086 assumes that we pass it a valid address, and doesn't
6087 force a reload. This will probably be fine if
6088 find_reloads_address finds some reloads. But if it
6089 doesn't find any, then we may have just converted a
6090 valid address into an invalid one. Check for that
6091 here. */
6092 if (reloaded != 1
6093 && strict_memory_address_p (orig_mode, XEXP (tem, 0))
6094 && !strict_memory_address_p (GET_MODE (tem),
6095 XEXP (tem, 0)))
6096 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6097 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6098 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6099 opnum, type);
6101 /* If this is not a toplevel operand, find_reloads doesn't see
6102 this substitution. We have to emit a USE of the pseudo so
6103 that delete_output_reload can see it. */
6104 if (replace_reloads && recog_data.operand[opnum] != x)
6105 /* We mark the USE with QImode so that we recognize it
6106 as one that can be safely deleted at the end of
6107 reload. */
6108 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6109 SUBREG_REG (x)),
6110 insn), QImode);
6111 x = tem;
6115 return x;
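/* Illustrative sketch for find_reloads_subreg_address, with made-up
   numbers, assuming a little-endian target.  If pseudo 400 is equivalent
   to the stack slot (mem:SI (plus (reg fp) (const_int -16))), then
     (subreg:HI (reg:SI 400) 2)
   is rewritten above to
     (mem:HI (plus (reg fp) (const_int -14)))
   i.e. SUBREG_BYTE is folded into the slot address, which is then
   checked and reloaded like any other address.  */
#if 0
{
  rtx x = gen_rtx_SUBREG (HImode, gen_rtx_REG (SImode, 400), 2);
  x = find_reloads_subreg_address (x, 0, 0, RELOAD_OTHER, 0, NULL_RTX);
}
#endif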
6118 /* Substitute into the current INSN the registers into which we have reloaded
6119 the things that need reloading. The array `replacements'
6120 contains the locations of all pointers that must be changed
6121 and says what to replace them with.
6123 The substitutions are made in place in the body of INSN.
6125 void
6126 subst_reloads (rtx insn)
6128 int i;
6130 for (i = 0; i < n_replacements; i++)
6132 struct replacement *r = &replacements[i];
6133 rtx reloadreg = rld[r->what].reg_rtx;
6134 if (reloadreg)
6136 #ifdef DEBUG_RELOAD
6137 /* This checking takes a very long time on some platforms
6138 causing the gcc.c-torture/compile/limits-fnargs.c test
6139 to time out during testing. See PR 31850.
6141 Internal consistency test. Check that we don't modify
6142 anything in the equivalence arrays. Whenever something from
6143 those arrays needs to be reloaded, it must be unshared before
6144 being substituted into; the equivalence must not be modified.
6145 Otherwise, if the equivalence is used after that, it will
6146 have been modified, and the thing substituted (probably a
6147 register) is likely overwritten and not a usable equivalence. */
6148 int check_regno;
6150 for (check_regno = 0; check_regno < max_regno; check_regno++)
6152 #define CHECK_MODF(ARRAY) \
6153 gcc_assert (!ARRAY[check_regno] \
6154 || !loc_mentioned_in_p (r->where, \
6155 ARRAY[check_regno]))
6157 CHECK_MODF (reg_equiv_constant);
6158 CHECK_MODF (reg_equiv_memory_loc);
6159 CHECK_MODF (reg_equiv_address);
6160 CHECK_MODF (reg_equiv_mem);
6161 #undef CHECK_MODF
6163 #endif /* DEBUG_RELOAD */
6165 /* If we're replacing a LABEL_REF with a register, there must
6166 already be an indication (to e.g. flow) which label this
6167 register refers to. */
6168 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6169 || !JUMP_P (insn)
6170 || find_reg_note (insn,
6171 REG_LABEL_OPERAND,
6172 XEXP (*r->where, 0))
6173 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6175 /* Encapsulate RELOADREG so its machine mode matches what
6176 used to be there. Note that gen_lowpart_common will
6177 do the wrong thing if RELOADREG is multi-word. RELOADREG
6178 will always be a REG here. */
6179 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6180 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6182 /* If we are putting this into a SUBREG and RELOADREG is a
6183 SUBREG, we would be making nested SUBREGs, so we have to fix
6184 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6186 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6188 if (GET_MODE (*r->subreg_loc)
6189 == GET_MODE (SUBREG_REG (reloadreg)))
6190 *r->subreg_loc = SUBREG_REG (reloadreg);
6191 else
6193 int final_offset =
6194 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6196 /* When working with SUBREGs the rule is that the byte
6197 offset must be a multiple of the size of the SUBREG's mode. */
6198 final_offset = (final_offset /
6199 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6200 final_offset = (final_offset *
6201 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6203 *r->where = SUBREG_REG (reloadreg);
6204 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6207 else
6208 *r->where = reloadreg;
6210 /* If reload got no reg and isn't optional, something's wrong. */
6211 else
6212 gcc_assert (rld[r->what].optional);
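/* Illustrative sketch of what subst_reloads does for one replacement,
   with a hypothetical reload number; in reality the loop above also
   adjusts the mode and flattens nested SUBREGs.  */
#if 0
{
  struct replacement *r = &replacements[0];
  /* If reload r->what was assigned (reg:SI 0), then the location the
     replacement points at -- e.g. the address inside a MEM in the insn
     pattern -- now reads (reg:SI 0) instead of the original pseudo.  */
  *r->where = rld[r->what].reg_rtx;
}
#endif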
6216 /* Make a copy of any replacements being done into X and move those
6217 copies to locations in Y, a copy of X. */
6219 void
6220 copy_replacements (rtx x, rtx y)
6222 /* We can't support X being a SUBREG because we might then need to know its
6223 location if something inside it was replaced. */
6224 gcc_assert (GET_CODE (x) != SUBREG);
6226 copy_replacements_1 (&x, &y, n_replacements);
6229 static void
6230 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6232 int i, j;
6233 rtx x, y;
6234 struct replacement *r;
6235 enum rtx_code code;
6236 const char *fmt;
6238 for (j = 0; j < orig_replacements; j++)
6240 if (replacements[j].subreg_loc == px)
6242 r = &replacements[n_replacements++];
6243 r->where = replacements[j].where;
6244 r->subreg_loc = py;
6245 r->what = replacements[j].what;
6246 r->mode = replacements[j].mode;
6248 else if (replacements[j].where == px)
6250 r = &replacements[n_replacements++];
6251 r->where = py;
6252 r->subreg_loc = 0;
6253 r->what = replacements[j].what;
6254 r->mode = replacements[j].mode;
6258 x = *px;
6259 y = *py;
6260 code = GET_CODE (x);
6261 fmt = GET_RTX_FORMAT (code);
6263 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6265 if (fmt[i] == 'e')
6266 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6267 else if (fmt[i] == 'E')
6268 for (j = XVECLEN (x, i); --j >= 0; )
6269 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6270 orig_replacements);
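/* Illustrative usage sketch for copy_replacements: after duplicating an
   rtx whose innards have pending replacements, the copy needs its own
   replacement records so subst_reloads patches both instances.  The
   operand index is arbitrary.  */
#if 0
{
  rtx orig = recog_data.operand[0];	/* assume a MEM with replacements */
  rtx copy = copy_rtx (orig);
  copy_replacements (orig, copy);
}
#endif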
6274 /* Change any replacements being done to *X to be done to *Y. */
6276 void
6277 move_replacements (rtx *x, rtx *y)
6279 int i;
6281 for (i = 0; i < n_replacements; i++)
6282 if (replacements[i].subreg_loc == x)
6283 replacements[i].subreg_loc = y;
6284 else if (replacements[i].where == x)
6286 replacements[i].where = y;
6287 replacements[i].subreg_loc = 0;
6291 /* If LOC was scheduled to be replaced by something, return the replacement.
6292 Otherwise, return *LOC. */
6295 find_replacement (rtx *loc)
6297 struct replacement *r;
6299 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6301 rtx reloadreg = rld[r->what].reg_rtx;
6303 if (reloadreg && r->where == loc)
6305 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6306 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6308 return reloadreg;
6310 else if (reloadreg && r->subreg_loc == loc)
6312 /* RELOADREG must be either a REG or a SUBREG.
6314 ??? Is it actually still ever a SUBREG? If so, why? */
6316 if (REG_P (reloadreg))
6317 return gen_rtx_REG (GET_MODE (*loc),
6318 (REGNO (reloadreg) +
6319 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6320 GET_MODE (SUBREG_REG (*loc)),
6321 SUBREG_BYTE (*loc),
6322 GET_MODE (*loc))));
6323 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6324 return reloadreg;
6325 else
6327 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6329 /* When working with SUBREGs the rule is that the byte
6330 offset must be a multiple of the size of the SUBREG's mode. */
6331 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6332 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6333 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6334 final_offset);
6339 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6340 what's inside and make a new rtl if so. */
6341 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6342 || GET_CODE (*loc) == MULT)
6344 rtx x = find_replacement (&XEXP (*loc, 0));
6345 rtx y = find_replacement (&XEXP (*loc, 1));
6347 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6348 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6351 return *loc;
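/* Illustrative usage sketch for find_replacement: code that emits insns
   during reload (secondary reload patterns, for instance) can use it to
   see an address part as it will look after substitution instead of the
   original pseudo.  The operand index is arbitrary.  */
#if 0
{
  rtx mem = recog_data.operand[1];	/* assume operand 1 is a MEM */
  rtx addr = find_replacement (&XEXP (mem, 0));
}
#endif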
6354 /* Return nonzero if register in range [REGNO, ENDREGNO)
6355 appears either explicitly or implicitly in X
6356 other than being stored into (except for earlyclobber operands).
6358 References contained within the substructure at LOC do not count.
6359 LOC may be zero, meaning don't ignore anything.
6361 This is similar to refers_to_regno_p in rtlanal.c except that we
6362 look at equivalences for pseudos that didn't get hard registers. */
6364 static int
6365 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6366 rtx x, rtx *loc)
6368 int i;
6369 unsigned int r;
6370 RTX_CODE code;
6371 const char *fmt;
6373 if (x == 0)
6374 return 0;
6376 repeat:
6377 code = GET_CODE (x);
6379 switch (code)
6381 case REG:
6382 r = REGNO (x);
6384 /* If this is a pseudo, a hard register must not have been allocated.
6385 X must therefore either be a constant or be in memory. */
6386 if (r >= FIRST_PSEUDO_REGISTER)
6388 if (reg_equiv_memory_loc[r])
6389 return refers_to_regno_for_reload_p (regno, endregno,
6390 reg_equiv_memory_loc[r],
6391 (rtx*) 0);
6393 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6394 return 0;
6397 return (endregno > r
6398 && regno < r + (r < FIRST_PSEUDO_REGISTER
6399 ? hard_regno_nregs[r][GET_MODE (x)]
6400 : 1));
6402 case SUBREG:
6403 /* If this is a SUBREG of a hard reg, we can see exactly which
6404 registers are being modified. Otherwise, handle normally. */
6405 if (REG_P (SUBREG_REG (x))
6406 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6408 unsigned int inner_regno = subreg_regno (x);
6409 unsigned int inner_endregno
6410 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6411 ? subreg_nregs (x) : 1);
6413 return endregno > inner_regno && regno < inner_endregno;
6415 break;
6417 case CLOBBER:
6418 case SET:
6419 if (&SET_DEST (x) != loc
6420 /* Note setting a SUBREG counts as referring to the REG it is in for
6421 a pseudo but not for hard registers since we can
6422 treat each word individually. */
6423 && ((GET_CODE (SET_DEST (x)) == SUBREG
6424 && loc != &SUBREG_REG (SET_DEST (x))
6425 && REG_P (SUBREG_REG (SET_DEST (x)))
6426 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6427 && refers_to_regno_for_reload_p (regno, endregno,
6428 SUBREG_REG (SET_DEST (x)),
6429 loc))
6430 /* If the output is an earlyclobber operand, this is
6431 a conflict. */
6432 || ((!REG_P (SET_DEST (x))
6433 || earlyclobber_operand_p (SET_DEST (x)))
6434 && refers_to_regno_for_reload_p (regno, endregno,
6435 SET_DEST (x), loc))))
6436 return 1;
6438 if (code == CLOBBER || loc == &SET_SRC (x))
6439 return 0;
6440 x = SET_SRC (x);
6441 goto repeat;
6443 default:
6444 break;
6447 /* X does not match, so try its subexpressions. */
6449 fmt = GET_RTX_FORMAT (code);
6450 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6452 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6454 if (i == 0)
6456 x = XEXP (x, 0);
6457 goto repeat;
6459 else
6460 if (refers_to_regno_for_reload_p (regno, endregno,
6461 XEXP (x, i), loc))
6462 return 1;
6464 else if (fmt[i] == 'E')
6466 int j;
6467 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6468 if (loc != &XVECEXP (x, i, j)
6469 && refers_to_regno_for_reload_p (regno, endregno,
6470 XVECEXP (x, i, j), loc))
6471 return 1;
6474 return 0;
6477 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6478 we check if any register number in X conflicts with the relevant register
6479 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6480 contains a MEM (we don't bother checking for memory addresses that can't
6481 conflict because we expect this to be a rare case).
6483 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6484 that we look at equivalences for pseudos that didn't get hard registers. */
6487 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6489 int regno, endregno;
6491 /* Overly conservative. */
6492 if (GET_CODE (x) == STRICT_LOW_PART
6493 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6494 x = XEXP (x, 0);
6496 /* If either argument is a constant, then modifying X can not affect IN. */
6497 if (CONSTANT_P (x) || CONSTANT_P (in))
6498 return 0;
6499 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
6500 return refers_to_mem_for_reload_p (in);
6501 else if (GET_CODE (x) == SUBREG)
6503 regno = REGNO (SUBREG_REG (x));
6504 if (regno < FIRST_PSEUDO_REGISTER)
6505 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6506 GET_MODE (SUBREG_REG (x)),
6507 SUBREG_BYTE (x),
6508 GET_MODE (x));
6509 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6510 ? subreg_nregs (x) : 1);
6512 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6514 else if (REG_P (x))
6516 regno = REGNO (x);
6518 /* If this is a pseudo, it must not have been assigned a hard register.
6519 Therefore, it must either be in memory or be a constant. */
6521 if (regno >= FIRST_PSEUDO_REGISTER)
6523 if (reg_equiv_memory_loc[regno])
6524 return refers_to_mem_for_reload_p (in);
6525 gcc_assert (reg_equiv_constant[regno]);
6526 return 0;
6529 endregno = END_HARD_REGNO (x);
6531 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6533 else if (MEM_P (x))
6534 return refers_to_mem_for_reload_p (in);
6535 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6536 || GET_CODE (x) == CC0)
6537 return reg_mentioned_p (x, in);
6538 else
6540 gcc_assert (GET_CODE (x) == PLUS);
6542 /* We actually want to know if X is mentioned somewhere inside IN.
6543 We must not say that (plus (sp) (const_int 124)) is in
6544 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6545 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6546 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6547 while (MEM_P (in))
6548 in = XEXP (in, 0);
6549 if (REG_P (in))
6550 return 0;
6551 else if (GET_CODE (in) == PLUS)
6552 return (rtx_equal_p (x, in)
6553 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6554 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6555 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6556 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6559 gcc_unreachable ();
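/* Illustrative sketch: unlike reg_overlap_mentioned_p, the function above
   also sees conflicts through reload equivalences.  Pseudo 400 is
   hypothetical; assume it got no hard register and is equivalent to a
   stack slot, so modifying any MEM may clobber it.  */
#if 0
{
  int conflict
    = reg_overlap_mentioned_for_reload_p (gen_rtx_MEM (SImode,
						       stack_pointer_rtx),
					  gen_rtx_REG (SImode, 400));
  /* conflict is nonzero even though no hard register numbers overlap.  */
}
#endif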
6562 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6563 registers. */
6565 static int
6566 refers_to_mem_for_reload_p (rtx x)
6568 const char *fmt;
6569 int i;
6571 if (MEM_P (x))
6572 return 1;
6574 if (REG_P (x))
6575 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6576 && reg_equiv_memory_loc[REGNO (x)]);
6578 fmt = GET_RTX_FORMAT (GET_CODE (x));
6579 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6580 if (fmt[i] == 'e'
6581 && (MEM_P (XEXP (x, i))
6582 || refers_to_mem_for_reload_p (XEXP (x, i))))
6583 return 1;
6585 return 0;
6588 /* Check the insns before INSN to see if there is a suitable register
6589 containing the same value as GOAL.
6590 If OTHER is -1, look for a register in class CLASS.
6591 Otherwise, just see if register number OTHER shares GOAL's value.
6593 Return an rtx for the register found, or zero if none is found.
6595 If RELOAD_REG_P is (short *)1,
6596 we reject any hard reg that appears in reload_reg_rtx
6597 because such a hard reg is also needed coming into this insn.
6599 If RELOAD_REG_P is any other nonzero value,
6600 it is a vector indexed by hard reg number
6601 and we reject any hard reg whose element in the vector is nonnegative
6602 as well as any that appears in reload_reg_rtx.
6604 If GOAL is zero, then GOALREG is a register number; we look
6605 for an equivalent for that register.
6607 MODE is the machine mode of the value we want an equivalence for.
6608 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6610 This function is used by jump.c as well as in the reload pass.
6612 If GOAL is the sum of the stack pointer and a constant, we treat it
6613 as if it were a constant except that sp is required to be unchanging. */
6616 find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
6617 short *reload_reg_p, int goalreg, enum machine_mode mode)
6619 rtx p = insn;
6620 rtx goaltry, valtry, value, where;
6621 rtx pat;
6622 int regno = -1;
6623 int valueno;
6624 int goal_mem = 0;
6625 int goal_const = 0;
6626 int goal_mem_addr_varies = 0;
6627 int need_stable_sp = 0;
6628 int nregs;
6629 int valuenregs;
6630 int num = 0;
6632 if (goal == 0)
6633 regno = goalreg;
6634 else if (REG_P (goal))
6635 regno = REGNO (goal);
6636 else if (MEM_P (goal))
6638 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6639 if (MEM_VOLATILE_P (goal))
6640 return 0;
6641 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6642 return 0;
6643 /* An address with side effects must be reexecuted. */
6644 switch (code)
6646 case POST_INC:
6647 case PRE_INC:
6648 case POST_DEC:
6649 case PRE_DEC:
6650 case POST_MODIFY:
6651 case PRE_MODIFY:
6652 return 0;
6653 default:
6654 break;
6656 goal_mem = 1;
6658 else if (CONSTANT_P (goal))
6659 goal_const = 1;
6660 else if (GET_CODE (goal) == PLUS
6661 && XEXP (goal, 0) == stack_pointer_rtx
6662 && CONSTANT_P (XEXP (goal, 1)))
6663 goal_const = need_stable_sp = 1;
6664 else if (GET_CODE (goal) == PLUS
6665 && XEXP (goal, 0) == frame_pointer_rtx
6666 && CONSTANT_P (XEXP (goal, 1)))
6667 goal_const = 1;
6668 else
6669 return 0;
6671 num = 0;
6672 /* Scan insns back from INSN, looking for one that copies
6673 a value into or out of GOAL.
6674 Stop and give up if we reach a label. */
6676 while (1)
6678 p = PREV_INSN (p);
6679 num++;
6680 if (p == 0 || LABEL_P (p)
6681 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6682 return 0;
6684 if (NONJUMP_INSN_P (p)
6685 /* If we don't want spill regs ... */
6686 && (! (reload_reg_p != 0
6687 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6688 /* ... then ignore insns introduced by reload; they aren't
6689 useful and can cause results in reload_as_needed to be
6690 different from what they were when calculating the need for
6691 spills. If we notice an input-reload insn here, we will
6692 reject it below, but it might hide a usable equivalent.
6693 That makes bad code. It may even fail: perhaps no reg was
6694 spilled for this insn because it was assumed we would find
6695 that equivalent. */
6696 || INSN_UID (p) < reload_first_uid))
6698 rtx tem;
6699 pat = single_set (p);
6701 /* First check for something that sets some reg equal to GOAL. */
6702 if (pat != 0
6703 && ((regno >= 0
6704 && true_regnum (SET_SRC (pat)) == regno
6705 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6707 (regno >= 0
6708 && true_regnum (SET_DEST (pat)) == regno
6709 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6711 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6712 /* When looking for stack pointer + const,
6713 make sure we don't use a stack adjust. */
6714 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6715 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6716 || (goal_mem
6717 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6718 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6719 || (goal_mem
6720 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6721 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6722 /* If we are looking for a constant,
6723 and something equivalent to that constant was copied
6724 into a reg, we can use that reg. */
6725 || (goal_const && REG_NOTES (p) != 0
6726 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6727 && ((rtx_equal_p (XEXP (tem, 0), goal)
6728 && (valueno
6729 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6730 || (REG_P (SET_DEST (pat))
6731 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6732 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6733 && GET_CODE (goal) == CONST_INT
6734 && 0 != (goaltry
6735 = operand_subword (XEXP (tem, 0), 0, 0,
6736 VOIDmode))
6737 && rtx_equal_p (goal, goaltry)
6738 && (valtry
6739 = operand_subword (SET_DEST (pat), 0, 0,
6740 VOIDmode))
6741 && (valueno = true_regnum (valtry)) >= 0)))
6742 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6743 NULL_RTX))
6744 && REG_P (SET_DEST (pat))
6745 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6746 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6747 && GET_CODE (goal) == CONST_INT
6748 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6749 VOIDmode))
6750 && rtx_equal_p (goal, goaltry)
6751 && (valtry
6752 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6753 && (valueno = true_regnum (valtry)) >= 0)))
6755 if (other >= 0)
6757 if (valueno != other)
6758 continue;
6760 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6761 continue;
6762 else if (!in_hard_reg_set_p (reg_class_contents[(int) class],
6763 mode, valueno))
6764 continue;
6765 value = valtry;
6766 where = p;
6767 break;
6772 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6773 (or copying VALUE into GOAL, if GOAL is also a register).
6774 Now verify that VALUE is really valid. */
6776 /* VALUENO is the register number of VALUE; a hard register. */
6778 /* Don't try to re-use something that is killed in this insn. We want
6779 to be able to trust REG_UNUSED notes. */
6780 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6781 return 0;
6783 /* If we propose to get the value from the stack pointer or if GOAL is
6784 a MEM based on the stack pointer, we need a stable SP. */
6785 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6786 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6787 goal)))
6788 need_stable_sp = 1;
6790 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6791 if (GET_MODE (value) != mode)
6792 return 0;
6794 /* Reject VALUE if it was loaded from GOAL
6795 and is also a register that appears in the address of GOAL. */
6797 if (goal_mem && value == SET_DEST (single_set (where))
6798 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6799 goal, (rtx*) 0))
6800 return 0;
6802 /* Reject registers that overlap GOAL. */
6804 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6805 nregs = hard_regno_nregs[regno][mode];
6806 else
6807 nregs = 1;
6808 valuenregs = hard_regno_nregs[valueno][mode];
6810 if (!goal_mem && !goal_const
6811 && regno + nregs > valueno && regno < valueno + valuenregs)
6812 return 0;
6814 /* Reject VALUE if it is one of the regs reserved for reloads.
6815 Reload1 knows how to reuse them anyway, and it would get
6816 confused if we allocated one without its knowledge.
6817 (Now that insns introduced by reload are ignored above,
6818 this case shouldn't happen, but I'm not positive.) */
6820 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6822 int i;
6823 for (i = 0; i < valuenregs; ++i)
6824 if (reload_reg_p[valueno + i] >= 0)
6825 return 0;
6828 /* Reject VALUE if it is a register being used for an input reload
6829 even if it is not one of those reserved. */
6831 if (reload_reg_p != 0)
6833 int i;
6834 for (i = 0; i < n_reloads; i++)
6835 if (rld[i].reg_rtx != 0 && rld[i].in)
6837 int regno1 = REGNO (rld[i].reg_rtx);
6838 int nregs1 = hard_regno_nregs[regno1]
6839 [GET_MODE (rld[i].reg_rtx)];
6840 if (regno1 < valueno + valuenregs
6841 && regno1 + nregs1 > valueno)
6842 return 0;
6846 if (goal_mem)
6847 /* We must treat the frame pointer as varying here,
6848 since it can vary, e.g. in a nonlocal goto as generated by expand_goto. */
6849 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6851 /* Now verify that the values of GOAL and VALUE remain unaltered
6852 until INSN is reached. */
6854 p = insn;
6855 while (1)
6857 p = PREV_INSN (p);
6858 if (p == where)
6859 return value;
6861 /* Don't trust the conversion past a function call
6862 if either of the two is in a call-clobbered register, or memory. */
6863 if (CALL_P (p))
6865 int i;
6867 if (goal_mem || need_stable_sp)
6868 return 0;
6870 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6871 for (i = 0; i < nregs; ++i)
6872 if (call_used_regs[regno + i]
6873 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6874 return 0;
6876 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6877 for (i = 0; i < valuenregs; ++i)
6878 if (call_used_regs[valueno + i]
6879 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6880 return 0;
6883 if (INSN_P (p))
6885 pat = PATTERN (p);
6887 /* Watch out for unspec_volatile, and volatile asms. */
6888 if (volatile_insn_p (pat))
6889 return 0;
6891 /* If this insn P stores in either GOAL or VALUE, return 0.
6892 If GOAL is a memory ref and this insn writes memory, return 0.
6893 If GOAL is a memory ref and its address is not constant,
6894 and this insn P changes a register used in GOAL, return 0. */
6896 if (GET_CODE (pat) == COND_EXEC)
6897 pat = COND_EXEC_CODE (pat);
6898 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6900 rtx dest = SET_DEST (pat);
6901 while (GET_CODE (dest) == SUBREG
6902 || GET_CODE (dest) == ZERO_EXTRACT
6903 || GET_CODE (dest) == STRICT_LOW_PART)
6904 dest = XEXP (dest, 0);
6905 if (REG_P (dest))
6907 int xregno = REGNO (dest);
6908 int xnregs;
6909 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6910 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6911 else
6912 xnregs = 1;
6913 if (xregno < regno + nregs && xregno + xnregs > regno)
6914 return 0;
6915 if (xregno < valueno + valuenregs
6916 && xregno + xnregs > valueno)
6917 return 0;
6918 if (goal_mem_addr_varies
6919 && reg_overlap_mentioned_for_reload_p (dest, goal))
6920 return 0;
6921 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6922 return 0;
6924 else if (goal_mem && MEM_P (dest)
6925 && ! push_operand (dest, GET_MODE (dest)))
6926 return 0;
6927 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6928 && reg_equiv_memory_loc[regno] != 0)
6929 return 0;
6930 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6931 return 0;
6933 else if (GET_CODE (pat) == PARALLEL)
6935 int i;
6936 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6938 rtx v1 = XVECEXP (pat, 0, i);
6939 if (GET_CODE (v1) == COND_EXEC)
6940 v1 = COND_EXEC_CODE (v1);
6941 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6943 rtx dest = SET_DEST (v1);
6944 while (GET_CODE (dest) == SUBREG
6945 || GET_CODE (dest) == ZERO_EXTRACT
6946 || GET_CODE (dest) == STRICT_LOW_PART)
6947 dest = XEXP (dest, 0);
6948 if (REG_P (dest))
6950 int xregno = REGNO (dest);
6951 int xnregs;
6952 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6953 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6954 else
6955 xnregs = 1;
6956 if (xregno < regno + nregs
6957 && xregno + xnregs > regno)
6958 return 0;
6959 if (xregno < valueno + valuenregs
6960 && xregno + xnregs > valueno)
6961 return 0;
6962 if (goal_mem_addr_varies
6963 && reg_overlap_mentioned_for_reload_p (dest,
6964 goal))
6965 return 0;
6966 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6967 return 0;
6969 else if (goal_mem && MEM_P (dest)
6970 && ! push_operand (dest, GET_MODE (dest)))
6971 return 0;
6972 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6973 && reg_equiv_memory_loc[regno] != 0)
6974 return 0;
6975 else if (need_stable_sp
6976 && push_operand (dest, GET_MODE (dest)))
6977 return 0;
6982 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
6984 rtx link;
6986 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
6987 link = XEXP (link, 1))
6989 pat = XEXP (link, 0);
6990 if (GET_CODE (pat) == CLOBBER)
6992 rtx dest = SET_DEST (pat);
6994 if (REG_P (dest))
6996 int xregno = REGNO (dest);
6997 int xnregs
6998 = hard_regno_nregs[xregno][GET_MODE (dest)];
7000 if (xregno < regno + nregs
7001 && xregno + xnregs > regno)
7002 return 0;
7003 else if (xregno < valueno + valuenregs
7004 && xregno + xnregs > valueno)
7005 return 0;
7006 else if (goal_mem_addr_varies
7007 && reg_overlap_mentioned_for_reload_p (dest,
7008 goal))
7009 return 0;
7012 else if (goal_mem && MEM_P (dest)
7013 && ! push_operand (dest, GET_MODE (dest)))
7014 return 0;
7015 else if (need_stable_sp
7016 && push_operand (dest, GET_MODE (dest)))
7017 return 0;
7022 #ifdef AUTO_INC_DEC
7023 /* If this insn auto-increments or auto-decrements
7024 either regno or valueno, return 0 now.
7025 If GOAL is a memory ref and its address is not constant,
7026 and this insn P increments a register used in GOAL, return 0. */
7028 rtx link;
7030 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7031 if (REG_NOTE_KIND (link) == REG_INC
7032 && REG_P (XEXP (link, 0)))
7034 int incno = REGNO (XEXP (link, 0));
7035 if (incno < regno + nregs && incno >= regno)
7036 return 0;
7037 if (incno < valueno + valuenregs && incno >= valueno)
7038 return 0;
7039 if (goal_mem_addr_varies
7040 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7041 goal))
7042 return 0;
7045 #endif
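/* Editorial illustration (not part of the original source): a minimal,
   hedged sketch of how a caller might consult find_equiv_reg.  The
   surrounding context (GOAL, INSN) and the helper use_existing_reg are
   hypothetical; the argument conventions follow the function above:
   CLASS restricts the acceptable hard regs, OTHER == -1 means any hard
   register number is acceptable, RELOAD_REG_P == 0 means no registers
   are reserved for reloads, and GOALREG is consulted only when GOAL
   is 0.  */
#if 0
static void
example_reuse_equivalent (rtx goal, rtx insn)
{
  /* Ask whether some hard register already holds the value of GOAL
     just before INSN, accepting any GENERAL_REGS register.  */
  rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                              (short *) 0, -1, GET_MODE (goal));

  if (equiv != 0)
    /* EQUIV is a hard REG rtx, in the requested mode, whose contents
       are known to equal GOAL at INSN; reuse it instead of reloading.  */
    use_existing_reg (equiv);   /* hypothetical helper */
}
#endif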
7050 /* Find a place where INCED appears in an increment or decrement operator
7051 within X, and return the amount INCED is incremented or decremented by.
7052 The value is always positive. */
7054 static int
7055 find_inc_amount (rtx x, rtx inced)
7057 enum rtx_code code = GET_CODE (x);
7058 const char *fmt;
7059 int i;
7061 if (code == MEM)
7063 rtx addr = XEXP (x, 0);
7064 if ((GET_CODE (addr) == PRE_DEC
7065 || GET_CODE (addr) == POST_DEC
7066 || GET_CODE (addr) == PRE_INC
7067 || GET_CODE (addr) == POST_INC)
7068 && XEXP (addr, 0) == inced)
7069 return GET_MODE_SIZE (GET_MODE (x));
7070 else if ((GET_CODE (addr) == PRE_MODIFY
7071 || GET_CODE (addr) == POST_MODIFY)
7072 && GET_CODE (XEXP (addr, 1)) == PLUS
7073 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7074 && XEXP (addr, 0) == inced
7075 && GET_CODE (XEXP (XEXP (addr, 1), 1)) == CONST_INT)
7077 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7078 return i < 0 ? -i : i;
7082 fmt = GET_RTX_FORMAT (code);
7083 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7085 if (fmt[i] == 'e')
7087 int tem = find_inc_amount (XEXP (x, i), inced);
7088 if (tem != 0)
7089 return tem;
7091 if (fmt[i] == 'E')
7093 int j;
7094 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7096 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7097 if (tem != 0)
7098 return tem;
7103 return 0;
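/* Editorial illustration (not in the original source): for an insn whose
   pattern contains a post-increment memory reference, e.g.

       (set (reg:SI 0) (mem:SI (post_inc:SI (reg:SI 3))))

   find_inc_amount (PATTERN (insn), <the (reg:SI 3) rtx>) returns
   GET_MODE_SIZE (SImode), i.e. 4 on a typical 32-bit target, while for a
   PRE_MODIFY/POST_MODIFY address of the form
   (pre_modify (reg) (plus (reg) (const_int -12))) it returns 12, the
   absolute value of the constant step.  */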
7106 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7107 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7109 #ifdef AUTO_INC_DEC
7110 static int
7111 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7112 rtx insn)
7114 rtx link;
7116 gcc_assert (insn);
7118 if (! INSN_P (insn))
7119 return 0;
7121 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7122 if (REG_NOTE_KIND (link) == REG_INC)
7124 unsigned int test = (int) REGNO (XEXP (link, 0));
7125 if (test >= regno && test < endregno)
7126 return 1;
7128 return 0;
7130 #else
7132 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7134 #endif
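/* Editorial note (not part of the original source): on targets without
   AUTO_INC_DEC the macro above makes every REG_INC query compile away
   to the constant 0, so e.g.

       if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
         return 1;

   in regno_clobbered_p below becomes dead code the compiler can discard,
   without any #ifdefs at the call sites.  */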
7136 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7137 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7138 REG_INC. REGNO must refer to a hard register. */
7140 int
7141 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7142 int sets)
7144 unsigned int nregs, endregno;
7146 /* regno must be a hard register. */
7147 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7149 nregs = hard_regno_nregs[regno][mode];
7150 endregno = regno + nregs;
7152 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7153 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7154 && REG_P (XEXP (PATTERN (insn), 0)))
7156 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7158 return test >= regno && test < endregno;
7161 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7162 return 1;
7164 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7166 int i = XVECLEN (PATTERN (insn), 0) - 1;
7168 for (; i >= 0; i--)
7170 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7171 if ((GET_CODE (elt) == CLOBBER
7172 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7173 && REG_P (XEXP (elt, 0)))
7175 unsigned int test = REGNO (XEXP (elt, 0));
7177 if (test >= regno && test < endregno)
7178 return 1;
7180 if (sets == 2
7181 && reg_inc_found_and_valid_p (regno, endregno, elt))
7182 return 1;
7186 return 0;
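/* Editorial illustration (not part of the original source): a hedged
   sketch of how the SETS argument changes the answer; the insns and
   register numbers below are hypothetical.
     For an insn whose pattern is (clobber (reg:SI 1)):
       regno_clobbered_p (1, insn, SImode, 0)  returns 1  (CLOBBER seen)
     For an insn whose pattern is (set (reg:SI 1) ...):
       regno_clobbered_p (1, insn, SImode, 0)  returns 0  (SETs ignored)
       regno_clobbered_p (1, insn, SImode, 1)  returns 1  (SETs counted)
     With SETS == 2, a REG_INC note mentioning reg 1 also counts,
     provided AUTO_INC_DEC is defined.  */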
7189 /* Return a REG rtx for the low part, with mode MODE, of hard register RELOADREG. */
7190 rtx
7191 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7193 int regno;
7195 if (GET_MODE (reloadreg) == mode)
7196 return reloadreg;
7198 regno = REGNO (reloadreg);
7200 if (WORDS_BIG_ENDIAN)
7201 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7202 - (int) hard_regno_nregs[regno][mode];
7204 return gen_rtx_REG (mode, regno);
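/* Editorial illustration (not part of the original source), assuming a
   WORDS_BIG_ENDIAN target where SImode needs one hard register and
   DImode needs two: if RELOADREG is (reg:DI 4), the DImode value spans
   hard regs 4 and 5 and its low SImode word lives in reg 5, so

       reload_adjust_reg_for_mode (reloadreg, SImode)

   returns (reg:SI 5); on a little-endian word order the register number
   is unchanged and the result is (reg:SI 4).  */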
7207 static const char *const reload_when_needed_name[] =
7209 "RELOAD_FOR_INPUT",
7210 "RELOAD_FOR_OUTPUT",
7211 "RELOAD_FOR_INSN",
7212 "RELOAD_FOR_INPUT_ADDRESS",
7213 "RELOAD_FOR_INPADDR_ADDRESS",
7214 "RELOAD_FOR_OUTPUT_ADDRESS",
7215 "RELOAD_FOR_OUTADDR_ADDRESS",
7216 "RELOAD_FOR_OPERAND_ADDRESS",
7217 "RELOAD_FOR_OPADDR_ADDR",
7218 "RELOAD_OTHER",
7219 "RELOAD_FOR_OTHER_ADDRESS"
7222 /* These functions print the reloads recorded by `find_reloads'.  */
7224 void
7225 debug_reload_to_stream (FILE *f)
7227 int r;
7228 const char *prefix;
7230 if (! f)
7231 f = stderr;
7232 for (r = 0; r < n_reloads; r++)
7234 fprintf (f, "Reload %d: ", r);
7236 if (rld[r].in != 0)
7238 fprintf (f, "reload_in (%s) = ",
7239 GET_MODE_NAME (rld[r].inmode));
7240 print_inline_rtx (f, rld[r].in, 24);
7241 fprintf (f, "\n\t");
7244 if (rld[r].out != 0)
7246 fprintf (f, "reload_out (%s) = ",
7247 GET_MODE_NAME (rld[r].outmode));
7248 print_inline_rtx (f, rld[r].out, 24);
7249 fprintf (f, "\n\t");
7252 fprintf (f, "%s, ", reg_class_names[(int) rld[r].class]);
7254 fprintf (f, "%s (opnum = %d)",
7255 reload_when_needed_name[(int) rld[r].when_needed],
7256 rld[r].opnum);
7258 if (rld[r].optional)
7259 fprintf (f, ", optional");
7261 if (rld[r].nongroup)
7262 fprintf (f, ", nongroup");
7264 if (rld[r].inc != 0)
7265 fprintf (f, ", inc by %d", rld[r].inc);
7267 if (rld[r].nocombine)
7268 fprintf (f, ", can't combine");
7270 if (rld[r].secondary_p)
7271 fprintf (f, ", secondary_reload_p");
7273 if (rld[r].in_reg != 0)
7275 fprintf (f, "\n\treload_in_reg: ");
7276 print_inline_rtx (f, rld[r].in_reg, 24);
7279 if (rld[r].out_reg != 0)
7281 fprintf (f, "\n\treload_out_reg: ");
7282 print_inline_rtx (f, rld[r].out_reg, 24);
7285 if (rld[r].reg_rtx != 0)
7287 fprintf (f, "\n\treload_reg_rtx: ");
7288 print_inline_rtx (f, rld[r].reg_rtx, 24);
7291 prefix = "\n\t";
7292 if (rld[r].secondary_in_reload != -1)
7294 fprintf (f, "%ssecondary_in_reload = %d",
7295 prefix, rld[r].secondary_in_reload);
7296 prefix = ", ";
7299 if (rld[r].secondary_out_reload != -1)
7300 fprintf (f, "%ssecondary_out_reload = %d\n",
7301 prefix, rld[r].secondary_out_reload);
7303 prefix = "\n\t";
7304 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7306 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7307 insn_data[rld[r].secondary_in_icode].name);
7308 prefix = ", ";
7311 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7312 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7313 insn_data[rld[r].secondary_out_icode].name);
7315 fprintf (f, "\n");
7319 void
7320 debug_reload (void)
7322 debug_reload_to_stream (stderr);
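/* Editorial note (not part of the original source): debug_reload and
   debug_reload_to_stream are typically invoked by hand from a debugger
   while the compiler is stopped inside reload, e.g. from gdb:

       (gdb) call debug_reload ()

   which dumps every entry of rld[0 .. n_reloads-1] to stderr in the
   format produced above.  */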