1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
56 NOTE SIDE EFFECTS:
58 find_reloads can alter the operands of the instruction it is called on.
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
72 Using a reload register for several reloads in one insn:
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
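/* Illustrative sketch: a schematic of the caller protocol described above,
   with arguments and bookkeeping simplified.  The real driver for
   find_reloads/subst_reloads lives in reload1.c; the function and variable
   names below are only for illustration.  */
#if 0
static void
reload_one_insn_schematically (rtx insn, short *spilled_regs)
{
  int i;

  /* Steps 1 and 2: build the rld[] table and record the replacement
     locations (second argument nonzero), here assuming one level of
     indirect addressing and known hard-reg liveness.  */
  find_reloads (insn, 1, 1, 1, spilled_regs);

  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      {
	/* Step 3: choose a hard register for this reload and record it in
	   rld[i].reg_rtx, then emit the load before INSN and, for an
	   output reload, the store after INSN.  */
      }

  /* Step 4: substitute the chosen reload registers into the locations
     recorded earlier.  */
  subst_reloads (insn);
}
#endif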
86 #define REG_OK_STRICT
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "function.h"
108 #include "params.h"
109 #include "target.h"
110 #include "ira.h"
112 /* True if X is a constant that can be forced into the constant pool.
113 MODE is the mode of the operand, or VOIDmode if not known. */
114 #define CONST_POOL_OK_P(MODE, X) \
115 ((MODE) != VOIDmode \
116 && CONSTANT_P (X) \
117 && GET_CODE (X) != HIGH \
118 && !targetm.cannot_force_const_mem (MODE, X))
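/* Illustrative examples: CONST_POOL_OK_P (SImode, GEN_INT (4)) is true on
   targets that allow SImode integers in the constant pool, while
   CONST_POOL_OK_P (VOIDmode, X) is always false by the first condition and
   any (high ...) rtx is rejected by the third.  */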
120 /* True if C is a non-empty register class that has too few registers
121 to be safely used as a reload target class. */
123 static inline bool
124 small_register_class_p (reg_class_t rclass)
126 return (reg_class_size [(int) rclass] == 1
127 || (reg_class_size [(int) rclass] >= 1
128 && targetm.class_likely_spilled_p (rclass)));
132 /* All reloads of the current insn are recorded here. See reload.h for
133 comments. */
134 int n_reloads;
135 struct reload rld[MAX_RELOADS];
137 /* All the "earlyclobber" operands of the current insn
138 are recorded here. */
139 int n_earlyclobbers;
140 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
142 int reload_n_operands;
144 /* Replacing reloads.
146 If `replace_reloads' is nonzero, then as each reload is recorded
147 an entry is made for it in the table `replacements'.
148 Then later `subst_reloads' can look through that table and
149 perform all the replacements needed. */
151 /* Nonzero means record the places to replace. */
152 static int replace_reloads;
154 /* Each replacement is recorded with a structure like this. */
155 struct replacement
157 rtx *where; /* Location to store in */
158 int what; /* which reload this is for */
159 enum machine_mode mode; /* mode it must have */
162 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
164 /* Number of replacements currently recorded. */
165 static int n_replacements;
167 /* Used to track what is modified by an operand. */
168 struct decomposition
170 int reg_flag; /* Nonzero if referencing a register. */
171 int safe; /* Nonzero if this can't conflict with anything. */
172 rtx base; /* Base address for MEM. */
173 HOST_WIDE_INT start; /* Starting offset or register number. */
174 HOST_WIDE_INT end; /* Ending offset or register number. */
177 #ifdef SECONDARY_MEMORY_NEEDED
179 /* Save MEMs needed to copy from one class of registers to another. One MEM
180 is used per mode, but normally only one or two modes are ever used.
182 We keep two versions, before and after register elimination. The one
183 after register elimination is recorded separately for each operand. This
184 is done in case the address is not valid, to be sure that we separately
185 reload each. */
187 static rtx secondary_memlocs[NUM_MACHINE_MODES];
188 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
189 static int secondary_memlocs_elim_used = 0;
190 #endif
192 /* The instruction we are doing reloads for;
193 so we can test whether a register dies in it. */
194 static rtx this_insn;
196 /* Nonzero if this instruction is a user-specified asm with operands. */
197 static int this_insn_is_asm;
199 /* If hard_regs_live_known is nonzero,
200 we can tell which hard regs are currently live,
201 at least enough to succeed in choosing dummy reloads. */
202 static int hard_regs_live_known;
204 /* Indexed by hard reg number,
205 element is nonnegative if hard reg has been spilled.
206 This vector is passed to `find_reloads' as an argument
207 and is not changed here. */
208 static short *static_reload_reg_p;
210 /* Set to 1 in subst_reg_equivs if it changes anything. */
211 static int subst_reg_equivs_changed;
213 /* On return from push_reload, holds the reload-number for the OUT
214 operand, which can be different from the reload-number for the input operand. */
215 static int output_reloadnum;
217 /* Compare two RTX's. */
218 #define MATCHES(x, y) \
219 (x == y || (x != 0 && (REG_P (x) \
220 ? REG_P (y) && REGNO (x) == REGNO (y) \
221 : rtx_equal_p (x, y) && ! side_effects_p (x))))
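/* Illustrative examples: two (reg:SI 5) rtx's MATCH because both are
   registers with the same register number; two structurally identical
   (mem:SI (reg:SI 5)) rtx's match via rtx_equal_p; but two
   (mem:SI (post_inc:SI (reg:SI 5))) rtx's do not, because side_effects_p
   is true for the auto-increment address.  */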
223 /* Indicates whether two reloads' purposes are for similar enough things that we
224 can merge their reloads. */
225 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
226 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
227 || ((when1) == (when2) && (op1) == (op2)) \
228 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
229 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
230 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
231 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
232 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
234 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
235 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
236 ((when1) != (when2) \
237 || ! ((op1) == (op2) \
238 || (when1) == RELOAD_FOR_INPUT \
239 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
240 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
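/* Worked example: two RELOAD_FOR_INPUT reloads for different operands, say
   opnum 0 and opnum 1, satisfy MERGABLE_RELOADS via the RELOAD_FOR_INPUT
   clause, and MERGE_TO_OTHER is false for them since when1 == when2 and
   when1 is RELOAD_FOR_INPUT, so the merged reload keeps its type.  By
   contrast, a RELOAD_FOR_INPUT_ADDRESS reload for operand 0 and one for
   operand 1 are not mergeable at all, because that type only matches the
   (when1) == (when2) && (op1) == (op2) clause, which requires the operand
   numbers to agree.  */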
242 /* If we are going to reload an address, compute the reload type to
243 use. */
244 #define ADDR_TYPE(type) \
245 ((type) == RELOAD_FOR_INPUT_ADDRESS \
246 ? RELOAD_FOR_INPADDR_ADDRESS \
247 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
248 ? RELOAD_FOR_OUTADDR_ADDRESS \
249 : (type)))
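/* For instance, ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) is
   RELOAD_FOR_INPADDR_ADDRESS, while ADDR_TYPE (RELOAD_FOR_OTHER_ADDRESS) is
   just RELOAD_FOR_OTHER_ADDRESS again, since only the input- and
   output-address types have a more deeply nested address-of-address
   counterpart.  */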
251 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
252 enum machine_mode, enum reload_type,
253 enum insn_code *, secondary_reload_info *);
254 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
255 int, unsigned int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, reg_class_t, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
271 addr_space_t, rtx *);
272 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
273 int, enum reload_type, int, rtx);
274 static rtx subst_reg_equivs (rtx, rtx);
275 static rtx subst_indexed_address (rtx);
276 static void update_auto_inc_notes (rtx, int, int);
277 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
278 enum rtx_code, enum rtx_code, rtx *,
279 int, enum reload_type,int, rtx);
280 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
281 enum machine_mode, int,
282 enum reload_type, int);
283 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
284 int, rtx, int *);
285 static void copy_replacements_1 (rtx *, rtx *, int);
286 static int find_inc_amount (rtx, rtx);
287 static int refers_to_mem_for_reload_p (rtx);
288 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
289 rtx, rtx *);
291 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
292 list yet. */
294 static void
295 push_reg_equiv_alt_mem (int regno, rtx mem)
297 rtx it;
299 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
300 if (rtx_equal_p (XEXP (it, 0), mem))
301 return;
303 reg_equiv_alt_mem_list (regno)
304 = alloc_EXPR_LIST (REG_EQUIV, mem,
305 reg_equiv_alt_mem_list (regno));
308 /* Determine if any secondary reloads are needed for loading (if IN_P is
309 nonzero) or storing (if IN_P is zero) X to or from a reload register of
310 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
311 are needed, push them.
313 Return the reload number of the secondary reload we made, or -1 if
314 we didn't need one. *PICODE is set to the insn_code to use if we do
315 need a secondary reload. */
317 static int
318 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
319 enum reg_class reload_class,
320 enum machine_mode reload_mode, enum reload_type type,
321 enum insn_code *picode, secondary_reload_info *prev_sri)
323 enum reg_class rclass = NO_REGS;
324 enum reg_class scratch_class;
325 enum machine_mode mode = reload_mode;
326 enum insn_code icode = CODE_FOR_nothing;
327 enum insn_code t_icode = CODE_FOR_nothing;
328 enum reload_type secondary_type;
329 int s_reload, t_reload = -1;
330 const char *scratch_constraint;
331 secondary_reload_info sri;
333 if (type == RELOAD_FOR_INPUT_ADDRESS
334 || type == RELOAD_FOR_OUTPUT_ADDRESS
335 || type == RELOAD_FOR_INPADDR_ADDRESS
336 || type == RELOAD_FOR_OUTADDR_ADDRESS)
337 secondary_type = type;
338 else
339 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341 *picode = CODE_FOR_nothing;
343 /* If X is a paradoxical SUBREG, use the inner value to determine both the
344 mode and object being reloaded. */
345 if (paradoxical_subreg_p (x))
347 x = SUBREG_REG (x);
348 reload_mode = GET_MODE (x);
351 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
352 is still a pseudo-register by now, it *must* have an equivalent MEM
353 but we don't want to assume that), use that equivalent when seeing if
354 a secondary reload is needed since whether or not a reload is needed
355 might be sensitive to the form of the MEM. */
357 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
358 && reg_equiv_mem (REGNO (x)))
359 x = reg_equiv_mem (REGNO (x));
361 sri.icode = CODE_FOR_nothing;
362 sri.prev_sri = prev_sri;
363 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
364 reload_mode, &sri);
365 icode = (enum insn_code) sri.icode;
367 /* If we don't need any secondary registers, done. */
368 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
369 return -1;
371 if (rclass != NO_REGS)
372 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
373 reload_mode, type, &t_icode, &sri);
375 /* If we will be using an insn, the secondary reload is for a
376 scratch register. */
378 if (icode != CODE_FOR_nothing)
380 /* If IN_P is nonzero, the reload register will be the output in
381 operand 0. If IN_P is zero, the reload register will be the input
382 in operand 1. Outputs should have an initial "=", which we must
383 skip. */
385 /* ??? It would be useful to be able to handle only two, or more than
386 three, operands, but for now we can only handle the case of having
387 exactly three: output, input and one temp/scratch. */
388 gcc_assert (insn_data[(int) icode].n_operands == 3);
390 /* ??? We currently have no way to represent a reload that needs
391 an icode to reload from an intermediate tertiary reload register.
392 We should probably have a new field in struct reload to tag a
393 chain of scratch operand reloads onto. */
394 gcc_assert (rclass == NO_REGS);
396 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
397 gcc_assert (*scratch_constraint == '=');
398 scratch_constraint++;
399 if (*scratch_constraint == '&')
400 scratch_constraint++;
401 scratch_class = (reg_class_for_constraint
402 (lookup_constraint (scratch_constraint)));
404 rclass = scratch_class;
405 mode = insn_data[(int) icode].operand[2].mode;
408 /* This case isn't valid, so fail. Reload is allowed to use the same
409 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
410 in the case of a secondary register, we actually need two different
411 registers for correct code. We fail here to prevent the possibility of
412 silently generating incorrect code later.
414 The convention is that secondary input reloads are valid only if the
415 secondary_class is different from class. If you have such a case, you
416 cannot use secondary reloads; you must work around the problem some
417 other way.
419 Allow this when a reload_in/out pattern is being used. I.e. assume
420 that the generated code handles this case. */
422 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
423 || t_icode != CODE_FOR_nothing);
425 /* See if we can reuse an existing secondary reload. */
426 for (s_reload = 0; s_reload < n_reloads; s_reload++)
427 if (rld[s_reload].secondary_p
428 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
429 || reg_class_subset_p (rld[s_reload].rclass, rclass))
430 && ((in_p && rld[s_reload].inmode == mode)
431 || (! in_p && rld[s_reload].outmode == mode))
432 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
433 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
434 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
435 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
436 && (small_register_class_p (rclass)
437 || targetm.small_register_classes_for_mode_p (VOIDmode))
438 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
439 opnum, rld[s_reload].opnum))
441 if (in_p)
442 rld[s_reload].inmode = mode;
443 if (! in_p)
444 rld[s_reload].outmode = mode;
446 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
447 rld[s_reload].rclass = rclass;
449 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
450 rld[s_reload].optional &= optional;
451 rld[s_reload].secondary_p = 1;
452 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
453 opnum, rld[s_reload].opnum))
454 rld[s_reload].when_needed = RELOAD_OTHER;
456 break;
459 if (s_reload == n_reloads)
461 #ifdef SECONDARY_MEMORY_NEEDED
462 /* If we need a memory location to copy between the two reload regs,
463 set it up now. Note that we do the input case before making
464 the reload and the output case after. This is due to the
465 way reloads are output. */
467 if (in_p && icode == CODE_FOR_nothing
468 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
470 get_secondary_mem (x, reload_mode, opnum, type);
472 /* We may have just added new reloads. Make sure we add
473 the new reload at the end. */
474 s_reload = n_reloads;
476 #endif
478 /* We need to make a new secondary reload for this register class. */
479 rld[s_reload].in = rld[s_reload].out = 0;
480 rld[s_reload].rclass = rclass;
482 rld[s_reload].inmode = in_p ? mode : VOIDmode;
483 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
484 rld[s_reload].reg_rtx = 0;
485 rld[s_reload].optional = optional;
486 rld[s_reload].inc = 0;
487 /* Maybe we could combine these, but it seems too tricky. */
488 rld[s_reload].nocombine = 1;
489 rld[s_reload].in_reg = 0;
490 rld[s_reload].out_reg = 0;
491 rld[s_reload].opnum = opnum;
492 rld[s_reload].when_needed = secondary_type;
493 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
494 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
495 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
496 rld[s_reload].secondary_out_icode
497 = ! in_p ? t_icode : CODE_FOR_nothing;
498 rld[s_reload].secondary_p = 1;
500 n_reloads++;
502 #ifdef SECONDARY_MEMORY_NEEDED
503 if (! in_p && icode == CODE_FOR_nothing
504 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
505 get_secondary_mem (x, mode, opnum, type);
506 #endif
509 *picode = icode;
510 return s_reload;
513 /* If a secondary reload is needed, return its class. If both an intermediate
514 register and a scratch register are needed, we return the class of the
515 intermediate register. */
516 reg_class_t
517 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
518 rtx x)
520 enum insn_code icode;
521 secondary_reload_info sri;
523 sri.icode = CODE_FOR_nothing;
524 sri.prev_sri = NULL;
525 rclass
526 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
527 icode = (enum insn_code) sri.icode;
529 /* If there are no secondary reloads at all, we return NO_REGS.
530 If an intermediate register is needed, we return its class. */
531 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
532 return rclass;
534 /* No intermediate register is needed, but we have a special reload
535 pattern, which we assume for now needs a scratch register. */
536 return scratch_reload_class (icode);
539 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
540 three operands, verify that operand 2 is an output operand, and return
541 its register class.
542 ??? We'd like to be able to handle any pattern with at least 2 operands,
543 for zero or more scratch registers, but that needs more infrastructure. */
544 enum reg_class
545 scratch_reload_class (enum insn_code icode)
547 const char *scratch_constraint;
548 enum reg_class rclass;
550 gcc_assert (insn_data[(int) icode].n_operands == 3);
551 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
552 gcc_assert (*scratch_constraint == '=');
553 scratch_constraint++;
554 if (*scratch_constraint == '&')
555 scratch_constraint++;
556 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
557 gcc_assert (rclass != NO_REGS);
558 return rclass;
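/* Illustrative example: if operand 2 of the reload pattern has the
   constraint "=&r", the leading '=' and the earlyclobber '&' are skipped
   and the remaining "r" maps to GENERAL_REGS, which is what this function
   returns.  */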
561 #ifdef SECONDARY_MEMORY_NEEDED
563 /* Return a memory location that will be used to copy X in mode MODE.
564 If we haven't already made a location for this mode in this insn,
565 call find_reloads_address on the location being returned. */
568 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
569 int opnum, enum reload_type type)
571 rtx loc;
572 int mem_valid;
574 /* By default, if MODE is narrower than a word, widen it to a word.
575 This is required because most machines that require these memory
576 locations do not support short loads and stores from all registers
577 (e.g., FP registers). */
579 #ifdef SECONDARY_MEMORY_NEEDED_MODE
580 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
581 #else
582 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
583 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
584 #endif
586 /* If we already have made a MEM for this operand in MODE, return it. */
587 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
588 return secondary_memlocs_elim[(int) mode][opnum];
590 /* If this is the first time we've tried to get a MEM for this mode,
591 allocate a new one. `something_changed' in reload will get set
592 by noticing that the frame size has changed. */
594 if (secondary_memlocs[(int) mode] == 0)
596 #ifdef SECONDARY_MEMORY_NEEDED_RTX
597 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
598 #else
599 secondary_memlocs[(int) mode]
600 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
601 #endif
604 /* Get a version of the address doing any eliminations needed. If that
605 didn't give us a new MEM, make a new one if it isn't valid. */
607 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
608 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
609 MEM_ADDR_SPACE (loc));
611 if (! mem_valid && loc == secondary_memlocs[(int) mode])
612 loc = copy_rtx (loc);
614 /* The only time the call below will do anything is if the stack
615 offset is too large. In that case IND_LEVELS doesn't matter, so we
616 can just pass a zero. Adjust the type to be the address of the
617 corresponding object. If the address was valid, save the eliminated
618 address. If it wasn't valid, we need to make a reload each time, so
619 don't save it. */
621 if (! mem_valid)
623 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
624 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
625 : RELOAD_OTHER);
627 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
628 opnum, type, 0, 0);
631 secondary_memlocs_elim[(int) mode][opnum] = loc;
632 if (secondary_memlocs_elim_used <= (int)mode)
633 secondary_memlocs_elim_used = (int)mode + 1;
634 return loc;
637 /* Clear any secondary memory locations we've made. */
639 void
640 clear_secondary_mem (void)
642 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
644 #endif /* SECONDARY_MEMORY_NEEDED */
647 /* Find the largest class which has at least one register valid in
648 mode INNER, and which for every such register, that register number
649 plus N is also valid in OUTER (if in range) and is cheap to move
650 into REGNO. Such a class must exist. */
652 static enum reg_class
653 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
654 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
655 unsigned int dest_regno ATTRIBUTE_UNUSED)
657 int best_cost = -1;
658 int rclass;
659 int regno;
660 enum reg_class best_class = NO_REGS;
661 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
662 unsigned int best_size = 0;
663 int cost;
665 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
667 int bad = 0;
668 int good = 0;
669 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
670 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
672 if (HARD_REGNO_MODE_OK (regno, inner))
674 good = 1;
675 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
676 && ! HARD_REGNO_MODE_OK (regno + n, outer))
677 bad = 1;
681 if (bad || !good)
682 continue;
683 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
685 if ((reg_class_size[rclass] > best_size
686 && (best_cost < 0 || best_cost >= cost))
687 || best_cost > cost)
689 best_class = (enum reg_class) rclass;
690 best_size = reg_class_size[rclass];
691 best_cost = register_move_cost (outer, (enum reg_class) rclass,
692 dest_class);
696 gcc_assert (best_size != 0);
698 return best_class;
701 /* We are trying to reload a subreg of something that is not a register.
702 Find the largest class which contains only registers valid in
703 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
704 which we would eventually like to obtain the object. */
706 static enum reg_class
707 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
708 enum machine_mode mode ATTRIBUTE_UNUSED,
709 enum reg_class dest_class ATTRIBUTE_UNUSED)
711 int best_cost = -1;
712 int rclass;
713 int regno;
714 enum reg_class best_class = NO_REGS;
715 unsigned int best_size = 0;
716 int cost;
718 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
720 int bad = 0;
721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
723 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
724 && !HARD_REGNO_MODE_OK (regno, mode))
725 bad = 1;
728 if (bad)
729 continue;
731 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
733 if ((reg_class_size[rclass] > best_size
734 && (best_cost < 0 || best_cost >= cost))
735 || best_cost > cost)
737 best_class = (enum reg_class) rclass;
738 best_size = reg_class_size[rclass];
739 best_cost = register_move_cost (outer, (enum reg_class) rclass,
740 dest_class);
744 gcc_assert (best_size != 0);
746 #ifdef LIMIT_RELOAD_CLASS
747 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
748 #endif
749 return best_class;
752 /* Return the number of a previously made reload that can be combined with
753 a new one, or n_reloads if none of the existing reloads can be used.
754 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
755 push_reload, they determine the kind of the new reload that we try to
756 combine. P_IN points to the corresponding value of IN, which can be
757 modified by this function.
758 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
760 static int
761 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
762 enum reload_type type, int opnum, int dont_share)
764 rtx in = *p_in;
765 int i;
766 /* We can't merge two reloads if the output of either one is
767 earlyclobbered. */
769 if (earlyclobber_operand_p (out))
770 return n_reloads;
772 /* We can use an existing reload if the class is right
773 and at least one of IN and OUT is a match
774 and the other is at worst neutral.
775 (A zero compared against anything is neutral.)
777 For targets with small register classes, don't use existing reloads
778 unless they are for the same thing since that can cause us to need
779 more reload registers than we otherwise would. */
781 for (i = 0; i < n_reloads; i++)
782 if ((reg_class_subset_p (rclass, rld[i].rclass)
783 || reg_class_subset_p (rld[i].rclass, rclass))
784 /* If the existing reload has a register, it must fit our class. */
785 && (rld[i].reg_rtx == 0
786 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
787 true_regnum (rld[i].reg_rtx)))
788 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
789 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
790 || (out != 0 && MATCHES (rld[i].out, out)
791 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
792 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
793 && (small_register_class_p (rclass)
794 || targetm.small_register_classes_for_mode_p (VOIDmode))
795 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
796 return i;
798 /* Reloading a plain reg for input can match a reload to postincrement
799 that reg, since the postincrement's value is the right value.
800 Likewise, it can match a preincrement reload, since we regard
801 the preincrementation as happening before any ref in this insn
802 to that register. */
803 for (i = 0; i < n_reloads; i++)
804 if ((reg_class_subset_p (rclass, rld[i].rclass)
805 || reg_class_subset_p (rld[i].rclass, rclass))
806 /* If the existing reload has a register, it must fit our
807 class. */
808 && (rld[i].reg_rtx == 0
809 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
810 true_regnum (rld[i].reg_rtx)))
811 && out == 0 && rld[i].out == 0 && rld[i].in != 0
812 && ((REG_P (in)
813 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
814 && MATCHES (XEXP (rld[i].in, 0), in))
815 || (REG_P (rld[i].in)
816 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
817 && MATCHES (XEXP (in, 0), rld[i].in)))
818 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
819 && (small_register_class_p (rclass)
820 || targetm.small_register_classes_for_mode_p (VOIDmode))
821 && MERGABLE_RELOADS (type, rld[i].when_needed,
822 opnum, rld[i].opnum))
824 /* Make sure reload_in ultimately has the increment,
825 not the plain register. */
826 if (REG_P (in))
827 *p_in = rld[i].in;
828 return i;
830 return n_reloads;
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834 expression. MODE is the mode that X will be used in. OUTPUT is true if
835 the function is invoked for the output part of an enclosing reload. */
837 static bool
838 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
840 rtx inner;
842 /* Only SUBREGs are problematical. */
843 if (GET_CODE (x) != SUBREG)
844 return false;
846 inner = SUBREG_REG (x);
848 /* If INNER is a constant or PLUS, then INNER will need reloading. */
849 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850 return true;
852 /* If INNER is not a hard register, then INNER will not need reloading. */
853 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854 return false;
856 /* If INNER is not ok for MODE, then INNER will need reloading. */
857 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
858 return true;
860 /* If this is for an output, and the outer part is a word or smaller,
861 INNER is larger than a word and the number of registers in INNER is
862 not the same as the number of words in INNER, then INNER will need
863 reloading (with an in-out reload). */
864 return (output
865 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
866 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
867 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
868 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
871 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
872 requiring an extra reload register. The caller has already found that
873 IN contains some reference to REGNO, so check that we can produce the
874 new value in a single step. E.g. if we have
875 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
876 instruction that adds one to a register, this should succeed.
877 However, if we have something like
878 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
879 needs to be loaded into a register first, we need a separate reload
880 register.
881 Such PLUS reloads are generated by find_reloads_address_part.
882 The out-of-range PLUS expressions are usually introduced in the instruction
883 patterns by register elimination and substituting pseudos without a home
884 by their function-invariant equivalences. */
885 static int
886 can_reload_into (rtx in, int regno, enum machine_mode mode)
888 rtx dst, test_insn;
889 int r = 0;
890 struct recog_data_d save_recog_data;
892 /* For matching constraints, we often get notional input reloads where
893 we want to use the original register as the reload register. I.e.
894 technically this is a non-optional input-output reload, but IN is
895 already a valid register, and has been chosen as the reload register.
896 Speed this up, since it trivially works. */
897 if (REG_P (in))
898 return 1;
900 /* To test MEMs properly, we'd have to take into account all the reloads
901 that are already scheduled, which can become quite complicated.
902 And since we've already handled address reloads for this MEM, it
903 should always succeed anyway. */
904 if (MEM_P (in))
905 return 1;
907 /* If we can make a simple SET insn that does the job, everything should
908 be fine. */
909 dst = gen_rtx_REG (mode, regno);
910 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
911 save_recog_data = recog_data;
912 if (recog_memoized (test_insn) >= 0)
914 extract_insn (test_insn);
915 r = constrain_operands (1);
917 recog_data = save_recog_data;
918 return r;
921 /* Record one reload that needs to be performed.
922 IN is an rtx saying where the data are to be found before this instruction.
923 OUT says where they must be stored after the instruction.
924 (IN is zero for data not read, and OUT is zero for data not written.)
925 INLOC and OUTLOC point to the places in the instructions where
926 IN and OUT were found.
927 If IN and OUT are both nonzero, it means the same register must be used
928 to reload both IN and OUT.
930 RCLASS is a register class required for the reloaded data.
931 INMODE is the machine mode that the instruction requires
932 for the reg that replaces IN and OUTMODE is likewise for OUT.
934 If IN is zero, then OUT's location and mode should be passed as
935 INLOC and INMODE.
937 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
939 OPTIONAL nonzero means this reload does not need to be performed:
940 it can be discarded if that is more convenient.
942 OPNUM and TYPE say what the purpose of this reload is.
944 The return value is the reload-number for this reload.
946 If both IN and OUT are nonzero, in some rare cases we might
947 want to make two separate reloads. (Actually we never do this now.)
948 Therefore, the reload-number for OUT is stored in
949 output_reloadnum when we return; the return value applies to IN.
950 Usually (presently always), when IN and OUT are nonzero,
951 the two reload-numbers are equal, but the caller should be careful to
952 distinguish them. */
955 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
956 enum reg_class rclass, enum machine_mode inmode,
957 enum machine_mode outmode, int strict_low, int optional,
958 int opnum, enum reload_type type)
960 int i;
961 int dont_share = 0;
962 int dont_remove_subreg = 0;
963 #ifdef LIMIT_RELOAD_CLASS
964 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
965 #endif
966 int secondary_in_reload = -1, secondary_out_reload = -1;
967 enum insn_code secondary_in_icode = CODE_FOR_nothing;
968 enum insn_code secondary_out_icode = CODE_FOR_nothing;
969 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
970 subreg_in_class = NO_REGS;
972 /* INMODE and/or OUTMODE could be VOIDmode if no mode
973 has been specified for the operand. In that case,
974 use the operand's mode as the mode to reload. */
975 if (inmode == VOIDmode && in != 0)
976 inmode = GET_MODE (in);
977 if (outmode == VOIDmode && out != 0)
978 outmode = GET_MODE (out);
980 /* If find_reloads and friends until now missed to replace a pseudo
981 with a constant of reg_equiv_constant something went wrong
982 beforehand.
983 Note that it can't simply be done here if we missed it earlier
984 since the constant might need to be pushed into the literal pool
985 and the resulting memref would probably need further
986 reloading. */
987 if (in != 0 && REG_P (in))
989 int regno = REGNO (in);
991 gcc_assert (regno < FIRST_PSEUDO_REGISTER
992 || reg_renumber[regno] >= 0
993 || reg_equiv_constant (regno) == NULL_RTX);
996 /* reg_equiv_constant only contains constants which are obviously
997 not appropriate as destination. So if we would need to replace
998 the destination pseudo with a constant we are in real
999 trouble. */
1000 if (out != 0 && REG_P (out))
1002 int regno = REGNO (out);
1004 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1005 || reg_renumber[regno] >= 0
1006 || reg_equiv_constant (regno) == NULL_RTX);
1009 /* If we have a read-write operand with an address side-effect,
1010 change either IN or OUT so the side-effect happens only once. */
1011 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1012 switch (GET_CODE (XEXP (in, 0)))
1014 case POST_INC: case POST_DEC: case POST_MODIFY:
1015 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1016 break;
1018 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1019 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1020 break;
1022 default:
1023 break;
1026 /* If we are reloading a (SUBREG constant ...), really reload just the
1027 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1028 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1029 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1030 register is a pseudo, also reload the inside expression.
1031 For machines that extend byte loads, do this for any SUBREG of a pseudo
1032 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1033 M2 is an integral mode that gets extended when loaded.
1034 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1035 where either M1 is not valid for R or M2 is wider than a word but we
1036 only need one register to store an M2-sized quantity in R.
1037 (However, if OUT is nonzero, we need to reload the reg *and*
1038 the subreg, so do nothing here, and let following statement handle it.)
1040 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1041 we can't handle it here because CONST_INT does not indicate a mode.
1043 Similarly, we must reload the inside expression if we have a
1044 STRICT_LOW_PART (presumably, in == out in this case).
1046 Also reload the inner expression if it does not require a secondary
1047 reload but the SUBREG does.
1049 Finally, reload the inner expression if it is a register that is in
1050 the class whose registers cannot be referenced in a different size
1051 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1052 cannot reload just the inside since we might end up with the wrong
1053 register class. But if it is inside a STRICT_LOW_PART, we have
1054 no choice, so we hope we do get the right register class there. */
1056 if (in != 0 && GET_CODE (in) == SUBREG
1057 && (subreg_lowpart_p (in) || strict_low)
1058 #ifdef CANNOT_CHANGE_MODE_CLASS
1059 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1060 #endif
1061 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1062 && (CONSTANT_P (SUBREG_REG (in))
1063 || GET_CODE (SUBREG_REG (in)) == PLUS
1064 || strict_low
1065 || (((REG_P (SUBREG_REG (in))
1066 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1067 || MEM_P (SUBREG_REG (in)))
1068 && ((GET_MODE_PRECISION (inmode)
1069 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1070 #ifdef LOAD_EXTEND_OP
1071 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1072 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1073 <= UNITS_PER_WORD)
1074 && (GET_MODE_PRECISION (inmode)
1075 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1076 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1077 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1078 #endif
1079 #ifdef WORD_REGISTER_OPERATIONS
1080 || ((GET_MODE_PRECISION (inmode)
1081 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1082 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1083 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1084 / UNITS_PER_WORD)))
1085 #endif
1087 || (REG_P (SUBREG_REG (in))
1088 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1089 /* The case where out is nonzero
1090 is handled differently in the following statement. */
1091 && (out == 0 || subreg_lowpart_p (in))
1092 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1093 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1094 > UNITS_PER_WORD)
1095 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1096 / UNITS_PER_WORD)
1097 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1098 [GET_MODE (SUBREG_REG (in))]))
1099 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1100 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1101 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1102 SUBREG_REG (in))
1103 == NO_REGS))
1104 #ifdef CANNOT_CHANGE_MODE_CLASS
1105 || (REG_P (SUBREG_REG (in))
1106 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1107 && REG_CANNOT_CHANGE_MODE_P
1108 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1109 #endif
1112 #ifdef LIMIT_RELOAD_CLASS
1113 in_subreg_loc = inloc;
1114 #endif
1115 inloc = &SUBREG_REG (in);
1116 in = *inloc;
1117 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1118 if (MEM_P (in))
1119 /* This is supposed to happen only for paradoxical subregs made by
1120 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1121 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1122 #endif
1123 inmode = GET_MODE (in);
1126 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1127 where M1 is not valid for R if it was not handled by the code above.
1129 Similar issue for (SUBREG constant ...) if it was not handled by the
1130 code above. This can happen if SUBREG_BYTE != 0.
1132 However, we must reload the inner reg *as well as* the subreg in
1133 that case. */
1135 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1137 if (REG_P (SUBREG_REG (in)))
1138 subreg_in_class
1139 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1140 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1141 GET_MODE (SUBREG_REG (in)),
1142 SUBREG_BYTE (in),
1143 GET_MODE (in)),
1144 REGNO (SUBREG_REG (in)));
1145 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1146 subreg_in_class = find_valid_class_1 (inmode,
1147 GET_MODE (SUBREG_REG (in)),
1148 rclass);
1150 /* This relies on the fact that emit_reload_insns outputs the
1151 instructions for input reloads of type RELOAD_OTHER in the same
1152 order as the reloads. Thus if the outer reload is also of type
1153 RELOAD_OTHER, we are guaranteed that this inner reload will be
1154 output before the outer reload. */
1155 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1156 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1157 dont_remove_subreg = 1;
1160 /* Similarly for paradoxical and problematical SUBREGs on the output.
1161 Note that there is no reason we need worry about the previous value
1162 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1163 entitled to clobber it all (except in the case of a word mode subreg
1164 or of a STRICT_LOW_PART, in that latter case the constraint should
1165 label it input-output.) */
1166 if (out != 0 && GET_CODE (out) == SUBREG
1167 && (subreg_lowpart_p (out) || strict_low)
1168 #ifdef CANNOT_CHANGE_MODE_CLASS
1169 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1170 #endif
1171 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1172 && (CONSTANT_P (SUBREG_REG (out))
1173 || strict_low
1174 || (((REG_P (SUBREG_REG (out))
1175 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1176 || MEM_P (SUBREG_REG (out)))
1177 && ((GET_MODE_PRECISION (outmode)
1178 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1179 #ifdef WORD_REGISTER_OPERATIONS
1180 || ((GET_MODE_PRECISION (outmode)
1181 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1182 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1183 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1184 / UNITS_PER_WORD)))
1185 #endif
1187 || (REG_P (SUBREG_REG (out))
1188 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1189 /* The case of a word mode subreg
1190 is handled differently in the following statement. */
1191 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1192 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1193 > UNITS_PER_WORD))
1194 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1195 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1196 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1197 SUBREG_REG (out))
1198 == NO_REGS))
1199 #ifdef CANNOT_CHANGE_MODE_CLASS
1200 || (REG_P (SUBREG_REG (out))
1201 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1202 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1203 GET_MODE (SUBREG_REG (out)),
1204 outmode))
1205 #endif
1208 #ifdef LIMIT_RELOAD_CLASS
1209 out_subreg_loc = outloc;
1210 #endif
1211 outloc = &SUBREG_REG (out);
1212 out = *outloc;
1213 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1214 gcc_assert (!MEM_P (out)
1215 || GET_MODE_SIZE (GET_MODE (out))
1216 <= GET_MODE_SIZE (outmode));
1217 #endif
1218 outmode = GET_MODE (out);
1221 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1222 where either M1 is not valid for R or M2 is wider than a word but we
1223 only need one register to store an M2-sized quantity in R.
1225 However, we must reload the inner reg *as well as* the subreg in
1226 that case and the inner reg is an in-out reload. */
1228 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1230 enum reg_class in_out_class
1231 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1232 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1233 GET_MODE (SUBREG_REG (out)),
1234 SUBREG_BYTE (out),
1235 GET_MODE (out)),
1236 REGNO (SUBREG_REG (out)));
1238 /* This relies on the fact that emit_reload_insns outputs the
1239 instructions for output reloads of type RELOAD_OTHER in reverse
1240 order of the reloads. Thus if the outer reload is also of type
1241 RELOAD_OTHER, we are guaranteed that this inner reload will be
1242 output after the outer reload. */
1243 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1244 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1245 0, 0, opnum, RELOAD_OTHER);
1246 dont_remove_subreg = 1;
1249 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1250 if (in != 0 && out != 0 && MEM_P (out)
1251 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1252 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1253 dont_share = 1;
1255 /* If IN is a SUBREG of a hard register, make a new REG. This
1256 simplifies some of the cases below. */
1258 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1259 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1260 && ! dont_remove_subreg)
1261 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1263 /* Similarly for OUT. */
1264 if (out != 0 && GET_CODE (out) == SUBREG
1265 && REG_P (SUBREG_REG (out))
1266 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1267 && ! dont_remove_subreg)
1268 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1270 /* Narrow down the class of register wanted if that is
1271 desirable on this machine for efficiency. */
1273 reg_class_t preferred_class = rclass;
1275 if (in != 0)
1276 preferred_class = targetm.preferred_reload_class (in, rclass);
1278 /* Output reloads may need analogous treatment, different in detail. */
1279 if (out != 0)
1280 preferred_class
1281 = targetm.preferred_output_reload_class (out, preferred_class);
1283 /* Discard what the target said if we cannot do it. */
1284 if (preferred_class != NO_REGS
1285 || (optional && type == RELOAD_FOR_OUTPUT))
1286 rclass = (enum reg_class) preferred_class;
1289 /* Make sure we use a class that can handle the actual pseudo
1290 inside any subreg. For example, on the 386, QImode regs
1291 can appear within SImode subregs. Although GENERAL_REGS
1292 can handle SImode, QImode needs a smaller class. */
1293 #ifdef LIMIT_RELOAD_CLASS
1294 if (in_subreg_loc)
1295 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1296 else if (in != 0 && GET_CODE (in) == SUBREG)
1297 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1299 if (out_subreg_loc)
1300 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1301 if (out != 0 && GET_CODE (out) == SUBREG)
1302 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1303 #endif
1305 /* Verify that this class is at least possible for the mode that
1306 is specified. */
1307 if (this_insn_is_asm)
1309 enum machine_mode mode;
1310 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1311 mode = inmode;
1312 else
1313 mode = outmode;
1314 if (mode == VOIDmode)
1316 error_for_asm (this_insn, "cannot reload integer constant "
1317 "operand in %<asm%>");
1318 mode = word_mode;
1319 if (in != 0)
1320 inmode = word_mode;
1321 if (out != 0)
1322 outmode = word_mode;
1324 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1325 if (HARD_REGNO_MODE_OK (i, mode)
1326 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1327 break;
1328 if (i == FIRST_PSEUDO_REGISTER)
1330 error_for_asm (this_insn, "impossible register constraint "
1331 "in %<asm%>");
1332 /* Avoid further trouble with this insn. */
1333 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1334 /* We used to continue here setting class to ALL_REGS, but it triggers
1335 sanity check on i386 for:
1336 void foo(long double d)
1338 asm("" :: "a" (d));
1340 Returning zero here ought to be safe as we take care in
1341 find_reloads to not process the reloads when instruction was
1342 replaced by USE. */
1344 return 0;
1348 /* Optional output reloads are always OK even if we have no register class,
1349 since the function of these reloads is only to have spill_reg_store etc.
1350 set, so that the storing insn can be deleted later. */
1351 gcc_assert (rclass != NO_REGS
1352 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1354 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1356 if (i == n_reloads)
1358 /* See if we need a secondary reload register to move between CLASS
1359 and IN or CLASS and OUT. Get the icode and push any required reloads
1360 needed for each of them if so. */
1362 if (in != 0)
1363 secondary_in_reload
1364 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1365 &secondary_in_icode, NULL);
1366 if (out != 0 && GET_CODE (out) != SCRATCH)
1367 secondary_out_reload
1368 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1369 type, &secondary_out_icode, NULL);
1371 /* We found no existing reload suitable for re-use.
1372 So add an additional reload. */
1374 #ifdef SECONDARY_MEMORY_NEEDED
1375 if (subreg_in_class == NO_REGS
1376 && in != 0
1377 && (REG_P (in)
1378 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1379 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1380 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1381 /* If a memory location is needed for the copy, make one. */
1382 if (subreg_in_class != NO_REGS
1383 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1384 get_secondary_mem (in, inmode, opnum, type);
1385 #endif
1387 i = n_reloads;
1388 rld[i].in = in;
1389 rld[i].out = out;
1390 rld[i].rclass = rclass;
1391 rld[i].inmode = inmode;
1392 rld[i].outmode = outmode;
1393 rld[i].reg_rtx = 0;
1394 rld[i].optional = optional;
1395 rld[i].inc = 0;
1396 rld[i].nocombine = 0;
1397 rld[i].in_reg = inloc ? *inloc : 0;
1398 rld[i].out_reg = outloc ? *outloc : 0;
1399 rld[i].opnum = opnum;
1400 rld[i].when_needed = type;
1401 rld[i].secondary_in_reload = secondary_in_reload;
1402 rld[i].secondary_out_reload = secondary_out_reload;
1403 rld[i].secondary_in_icode = secondary_in_icode;
1404 rld[i].secondary_out_icode = secondary_out_icode;
1405 rld[i].secondary_p = 0;
1407 n_reloads++;
1409 #ifdef SECONDARY_MEMORY_NEEDED
1410 if (out != 0
1411 && (REG_P (out)
1412 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1413 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1414 && SECONDARY_MEMORY_NEEDED (rclass,
1415 REGNO_REG_CLASS (reg_or_subregno (out)),
1416 outmode))
1417 get_secondary_mem (out, outmode, opnum, type);
1418 #endif
1420 else
1422 /* We are reusing an existing reload,
1423 but we may have additional information for it.
1424 For example, we may now have both IN and OUT
1425 while the old one may have just one of them. */
1427 /* The modes can be different. If they are, we want to reload in
1428 the larger mode, so that the value is valid for both modes. */
1429 if (inmode != VOIDmode
1430 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1431 rld[i].inmode = inmode;
1432 if (outmode != VOIDmode
1433 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1434 rld[i].outmode = outmode;
1435 if (in != 0)
1437 rtx in_reg = inloc ? *inloc : 0;
1438 /* If we merge reloads for two distinct rtl expressions that
1439 are identical in content, there might be duplicate address
1440 reloads. Remove the extra set now, so that if we later find
1441 that we can inherit this reload, we can get rid of the
1442 address reloads altogether.
1444 Do not do this if both reloads are optional since the result
1445 would be an optional reload which could potentially leave
1446 unresolved address replacements.
1448 It is not sufficient to call transfer_replacements since
1449 choose_reload_regs will remove the replacements for address
1450 reloads of inherited reloads which results in the same
1451 problem. */
1452 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1453 && ! (rld[i].optional && optional))
1455 /* We must keep the address reload with the lower operand
1456 number alive. */
1457 if (opnum > rld[i].opnum)
1459 remove_address_replacements (in);
1460 in = rld[i].in;
1461 in_reg = rld[i].in_reg;
1463 else
1464 remove_address_replacements (rld[i].in);
1466 /* When emitting reloads we don't necessarily look at the in-
1467 and outmode, but also directly at the operands (in and out).
1468 So we can't simply overwrite them with whatever we have found
1469 for this (to-be-merged) reload, we have to "merge" that too.
1470 Reusing another reload already verified that we deal with the
1471 same operands, just possibly in different modes. So we
1472 overwrite the operands only when the new mode is larger.
1473 See also PR33613. */
1474 if (!rld[i].in
1475 || GET_MODE_SIZE (GET_MODE (in))
1476 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1477 rld[i].in = in;
1478 if (!rld[i].in_reg
1479 || (in_reg
1480 && GET_MODE_SIZE (GET_MODE (in_reg))
1481 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1482 rld[i].in_reg = in_reg;
1484 if (out != 0)
1486 if (!rld[i].out
1487 || (out
1488 && GET_MODE_SIZE (GET_MODE (out))
1489 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1490 rld[i].out = out;
1491 if (outloc
1492 && (!rld[i].out_reg
1493 || GET_MODE_SIZE (GET_MODE (*outloc))
1494 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1495 rld[i].out_reg = *outloc;
1497 if (reg_class_subset_p (rclass, rld[i].rclass))
1498 rld[i].rclass = rclass;
1499 rld[i].optional &= optional;
1500 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1501 opnum, rld[i].opnum))
1502 rld[i].when_needed = RELOAD_OTHER;
1503 rld[i].opnum = MIN (rld[i].opnum, opnum);
1506 /* If the ostensible rtx being reloaded differs from the rtx found
1507 in the location to substitute, this reload is not safe to combine
1508 because we cannot reliably tell whether it appears in the insn. */
1510 if (in != 0 && in != *inloc)
1511 rld[i].nocombine = 1;
1513 #if 0
1514 /* This was replaced by changes in find_reloads_address_1 and the new
1515 function inc_for_reload, which go with a new meaning of reload_inc. */
1517 /* If this is an IN/OUT reload in an insn that sets the CC,
1518 it must be for an autoincrement. It doesn't work to store
1519 the incremented value after the insn because that would clobber the CC.
1520 So we must load the value being reloaded from, increment it,
1521 store it back, then decrement it again. */
1522 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1524 out = 0;
1525 rld[i].out = 0;
1526 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1527 /* If we did not find a nonzero amount-to-increment-by,
1528 that contradicts the belief that IN is being incremented
1529 in an address in this insn. */
1530 gcc_assert (rld[i].inc != 0);
1532 #endif
1534 /* If we will replace IN and OUT with the reload-reg,
1535 record where they are located so that substitution need
1536 not do a tree walk. */
1538 if (replace_reloads)
1540 if (inloc != 0)
1542 struct replacement *r = &replacements[n_replacements++];
1543 r->what = i;
1544 r->where = inloc;
1545 r->mode = inmode;
1547 if (outloc != 0 && outloc != inloc)
1549 struct replacement *r = &replacements[n_replacements++];
1550 r->what = i;
1551 r->where = outloc;
1552 r->mode = outmode;
1556 /* If this reload is just being introduced and it has both
1557 an incoming quantity and an outgoing quantity that are
1558 supposed to be made to match, see if either one of the two
1559 can serve as the place to reload into.
1561 If one of them is acceptable, set rld[i].reg_rtx
1562 to that one. */
1564 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1566 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1567 inmode, outmode,
1568 rld[i].rclass, i,
1569 earlyclobber_operand_p (out));
1571 /* If the outgoing register already contains the same value
1572 as the incoming one, we can dispense with loading it.
1573 The easiest way to tell the caller that is to give a phony
1574 value for the incoming operand (same as outgoing one). */
1575 if (rld[i].reg_rtx == out
1576 && (REG_P (in) || CONSTANT_P (in))
1577 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1578 static_reload_reg_p, i, inmode))
1579 rld[i].in = out;
1582 /* If this is an input reload and the operand contains a register that
1583 dies in this insn and is used nowhere else, see if it is the right class
1584 to be used for this reload. Use it if so. (This occurs most commonly
1585 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1586 this if it is also an output reload that mentions the register unless
1587 the output is a SUBREG that clobbers an entire register.
1589 Note that the operand might be one of the spill regs, if it is a
1590 pseudo reg and we are in a block where spilling has not taken place.
1591 But if there is no spilling in this block, that is OK.
1592 An explicitly used hard reg cannot be a spill reg. */
1594 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1596 rtx note;
1597 int regno;
1598 enum machine_mode rel_mode = inmode;
1600 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1601 rel_mode = outmode;
1603 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1604 if (REG_NOTE_KIND (note) == REG_DEAD
1605 && REG_P (XEXP (note, 0))
1606 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1607 && reg_mentioned_p (XEXP (note, 0), in)
1608 /* Check that a former pseudo is valid; see find_dummy_reload. */
1609 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1610 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1611 ORIGINAL_REGNO (XEXP (note, 0)))
1612 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1613 && ! refers_to_regno_for_reload_p (regno,
1614 end_hard_regno (rel_mode,
1615 regno),
1616 PATTERN (this_insn), inloc)
1617 /* If this is also an output reload, IN cannot be used as
1618 the reload register if it is set in this insn unless IN
1619 is also OUT. */
1620 && (out == 0 || in == out
1621 || ! hard_reg_set_here_p (regno,
1622 end_hard_regno (rel_mode, regno),
1623 PATTERN (this_insn)))
1624 /* ??? Why is this code so different from the previous?
1625 Is there any simple coherent way to describe the two together?
1626 What's going on here? */
1627 && (in != out
1628 || (GET_CODE (in) == SUBREG
1629 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1630 / UNITS_PER_WORD)
1631 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1632 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1633 /* Make sure the operand fits in the reg that dies. */
1634 && (GET_MODE_SIZE (rel_mode)
1635 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1636 && HARD_REGNO_MODE_OK (regno, inmode)
1637 && HARD_REGNO_MODE_OK (regno, outmode))
1639 unsigned int offs;
1640 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1641 hard_regno_nregs[regno][outmode]);
1643 for (offs = 0; offs < nregs; offs++)
1644 if (fixed_regs[regno + offs]
1645 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1646 regno + offs))
1647 break;
1649 if (offs == nregs
1650 && (! (refers_to_regno_for_reload_p
1651 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1652 || can_reload_into (in, regno, inmode)))
1654 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1655 break;
1660 if (out)
1661 output_reloadnum = i;
1663 return i;
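/* Illustrative sketch, added for exposition and not part of the original
   source: the SUBREG test in push_reload above compares operand sizes
   after rounding each up to a whole number of words, using the usual
   CEIL-division idiom.  A standalone example of that arithmetic,
   assuming 4-byte words purely for illustration:  */
#if 0
static int
words_for_size (int size_in_bytes)
{
  /* Round SIZE_IN_BYTES up to a whole number of 4-byte words.  */
  return (size_in_bytes + (4 - 1)) / 4;
}
/* words_for_size (6) == 2 and words_for_size (8) == 2, so a 6-byte
   SUBREG of an 8-byte register passes the equal-word-count test,
   whereas words_for_size (4) == 1 would not.  */
#endif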
1666 /* Record an additional place we must replace a value
1667 for which we have already recorded a reload.
1668 RELOADNUM is the value returned by push_reload
1669 when the reload was recorded.
1670 This is used in insn patterns that use match_dup. */
1672 static void
1673 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1675 if (replace_reloads)
1677 struct replacement *r = &replacements[n_replacements++];
1678 r->what = reloadnum;
1679 r->where = loc;
1680 r->mode = mode;
1684 /* Duplicate any replacement we have recorded to apply at
1685 location ORIG_LOC to also be performed at DUP_LOC.
1686 This is used in insn patterns that use match_dup. */
1688 static void
1689 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1691 int i, n = n_replacements;
1693 for (i = 0; i < n; i++)
1695 struct replacement *r = &replacements[i];
1696 if (r->where == orig_loc)
1697 push_replacement (dup_loc, r->what, r->mode);
1701 /* Transfer all replacements that used to be in reload FROM to be in
1702 reload TO. */
1704 void
1705 transfer_replacements (int to, int from)
1707 int i;
1709 for (i = 0; i < n_replacements; i++)
1710 if (replacements[i].what == from)
1711 replacements[i].what = to;
1714 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1715 or a subpart of it. If we have any replacements registered for IN_RTX,
1716 cancel the reloads that were supposed to load them.
1717 Return nonzero if we canceled any reloads. */
1719 remove_address_replacements (rtx in_rtx)
1721 int i, j;
1722 char reload_flags[MAX_RELOADS];
1723 int something_changed = 0;
1725 memset (reload_flags, 0, sizeof reload_flags);
1726 for (i = 0, j = 0; i < n_replacements; i++)
1728 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1729 reload_flags[replacements[i].what] |= 1;
1730 else
1732 replacements[j++] = replacements[i];
1733 reload_flags[replacements[i].what] |= 2;
1736 /* Note that the following store must be done before the recursive calls. */
1737 n_replacements = j;
1739 for (i = n_reloads - 1; i >= 0; i--)
1741 if (reload_flags[i] == 1)
1743 deallocate_reload_reg (i);
1744 remove_address_replacements (rld[i].in);
1745 rld[i].in = 0;
1746 something_changed = 1;
1749 return something_changed;
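/* Explanatory note, added for exposition and not part of the original
   source: in remove_address_replacements above, reload_flags[r] collects
   bit 1 when a replacement of reload R lies inside IN_RTX and bit 2 when
   reload R keeps a replacement elsewhere.  Only reloads whose flag value
   is exactly 1 (every recorded replacement was inside IN_RTX) are
   deallocated and cancelled; a value of 3 means the reload is still
   needed for another location and is kept.  */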
1752 /* If there is only one output reload, and it is not for an earlyclobber
1753 operand, try to combine it with a (logically unrelated) input reload
1754 to reduce the number of reload registers needed.
1756 This is safe if the input reload does not appear in
1757 the value being output-reloaded, because this implies
1758 it is not needed any more once the original insn completes.
1760 If that doesn't work, see if we can use any of the registers that
1761 die in this insn as a reload register. We can if it is of the right
1762 class and does not appear in the value being output-reloaded. */
1764 static void
1765 combine_reloads (void)
1767 int i, regno;
1768 int output_reload = -1;
1769 int secondary_out = -1;
1770 rtx note;
1772 /* Find the output reload; return unless there is exactly one
1773 and that one is mandatory. */
1775 for (i = 0; i < n_reloads; i++)
1776 if (rld[i].out != 0)
1778 if (output_reload >= 0)
1779 return;
1780 output_reload = i;
1783 if (output_reload < 0 || rld[output_reload].optional)
1784 return;
1786 /* An input-output reload isn't combinable. */
1788 if (rld[output_reload].in != 0)
1789 return;
1791 /* If this reload is for an earlyclobber operand, we can't do anything. */
1792 if (earlyclobber_operand_p (rld[output_reload].out))
1793 return;
1795 /* If there is a reload for part of the address of this operand, we would
1796 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1797 its life to the point where doing this combine would not lower the
1798 number of spill registers needed. */
1799 for (i = 0; i < n_reloads; i++)
1800 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1801 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1802 && rld[i].opnum == rld[output_reload].opnum)
1803 return;
1805 /* Check each input reload; can we combine it? */
1807 for (i = 0; i < n_reloads; i++)
1808 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1809 /* Life span of this reload must not extend past main insn. */
1810 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1811 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1812 && rld[i].when_needed != RELOAD_OTHER
1813 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1814 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1815 [(int) rld[output_reload].outmode])
1816 && rld[i].inc == 0
1817 && rld[i].reg_rtx == 0
1818 #ifdef SECONDARY_MEMORY_NEEDED
1819 /* Don't combine two reloads with different secondary
1820 memory locations. */
1821 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1822 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1823 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1824 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1825 #endif
1826 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1827 ? (rld[i].rclass == rld[output_reload].rclass)
1828 : (reg_class_subset_p (rld[i].rclass,
1829 rld[output_reload].rclass)
1830 || reg_class_subset_p (rld[output_reload].rclass,
1831 rld[i].rclass)))
1832 && (MATCHES (rld[i].in, rld[output_reload].out)
1833 /* Args reversed because the first arg seems to be
1834 the one that we imagine being modified
1835 while the second is the one that might be affected. */
1836 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1837 rld[i].in)
1838 /* However, if the input is a register that appears inside
1839 the output, then we also can't share.
1840 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1841 If the same reload reg is used for both reg 69 and the
1842 result to be stored in memory, then that result
1843 will clobber the address of the memory ref. */
1844 && ! (REG_P (rld[i].in)
1845 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1846 rld[output_reload].out))))
1847 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1848 rld[i].when_needed != RELOAD_FOR_INPUT)
1849 && (reg_class_size[(int) rld[i].rclass]
1850 || targetm.small_register_classes_for_mode_p (VOIDmode))
1851 /* We will allow making things slightly worse by combining an
1852 input and an output, but no worse than that. */
1853 && (rld[i].when_needed == RELOAD_FOR_INPUT
1854 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1856 int j;
1858 /* We have found a reload to combine with! */
1859 rld[i].out = rld[output_reload].out;
1860 rld[i].out_reg = rld[output_reload].out_reg;
1861 rld[i].outmode = rld[output_reload].outmode;
1862 /* Mark the old output reload as inoperative. */
1863 rld[output_reload].out = 0;
1864 /* The combined reload is needed for the entire insn. */
1865 rld[i].when_needed = RELOAD_OTHER;
1866 /* If the output reload had a secondary reload, copy it. */
1867 if (rld[output_reload].secondary_out_reload != -1)
1869 rld[i].secondary_out_reload
1870 = rld[output_reload].secondary_out_reload;
1871 rld[i].secondary_out_icode
1872 = rld[output_reload].secondary_out_icode;
1875 #ifdef SECONDARY_MEMORY_NEEDED
1876 /* Copy any secondary MEM. */
1877 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1878 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1879 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1880 #endif
1881 /* If required, minimize the register class. */
1882 if (reg_class_subset_p (rld[output_reload].rclass,
1883 rld[i].rclass))
1884 rld[i].rclass = rld[output_reload].rclass;
1886 /* Transfer all replacements from the old reload to the combined. */
1887 for (j = 0; j < n_replacements; j++)
1888 if (replacements[j].what == output_reload)
1889 replacements[j].what = i;
1891 return;
1894 /* If this insn has only one operand that is modified or written (assumed
1895 to be the first), it must be the one corresponding to this reload. It
1896 is safe to use anything that dies in this insn for that output provided
1897 that it does not occur in the output (we already know it isn't an
1898 earlyclobber).  If this is an asm insn, give up. */
1900 if (INSN_CODE (this_insn) == -1)
1901 return;
1903 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1904 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1905 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1906 return;
1908 /* See if some hard register that dies in this insn and is not used in
1909 the output is the right class. Only works if the register we pick
1910 up can fully hold our output reload. */
1911 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1912 if (REG_NOTE_KIND (note) == REG_DEAD
1913 && REG_P (XEXP (note, 0))
1914 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1915 rld[output_reload].out)
1916 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1917 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1918 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1919 regno)
1920 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1921 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1922 /* Ensure that a secondary or tertiary reload for this output
1923 won't want this register. */
1924 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1925 || (!(TEST_HARD_REG_BIT
1926 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1927 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1928 || !(TEST_HARD_REG_BIT
1929 (reg_class_contents[(int) rld[secondary_out].rclass],
1930 regno)))))
1931 && !fixed_regs[regno]
1932 /* Check that a former pseudo is valid; see find_dummy_reload. */
1933 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1934 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1935 ORIGINAL_REGNO (XEXP (note, 0)))
1936 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1938 rld[output_reload].reg_rtx
1939 = gen_rtx_REG (rld[output_reload].outmode, regno);
1940 return;
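/* Illustrative example, added for exposition and not part of the original
   source (the pseudo numbers and mode are made up): for an insn such as

       (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 4)))

   where neither pseudo received a hard register, pseudo 100 needs an
   input reload and pseudo 101 an output reload.  Since the input operand
   does not appear in the value being output-reloaded, combine_reloads
   can let a single reload register hold the input before the insn and
   the result after it, saving one spill register.  */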
1944 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1945 See if one of IN and OUT is a register that may be used;
1946 this is desirable since a spill-register won't be needed.
1947 If so, return the register rtx that proves acceptable.
1949 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1950 RCLASS is the register class required for the reload.
1952 If FOR_REAL is >= 0, it is the number of the reload,
1953 and in some cases when it can be discovered that OUT doesn't need
1954 to be computed, clear out rld[FOR_REAL].out.
1956 If FOR_REAL is -1, this should not be done, because this call
1957 is just to see if a register can be found, not to find and install it.
1959 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1960 puts an additional constraint on being able to use IN for OUT since
1961 IN must not appear elsewhere in the insn (it is assumed that IN itself
1962 is safe from the earlyclobber). */
1964 static rtx
1965 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1966 enum machine_mode inmode, enum machine_mode outmode,
1967 reg_class_t rclass, int for_real, int earlyclobber)
1969 rtx in = real_in;
1970 rtx out = real_out;
1971 int in_offset = 0;
1972 int out_offset = 0;
1973 rtx value = 0;
1975 /* If operands exceed a word, we can't use either of them
1976 unless they have the same size. */
1977 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1978 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1979 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1980 return 0;
1982 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1983 respectively refers to a hard register. */
1985 /* Find the inside of any subregs. */
1986 while (GET_CODE (out) == SUBREG)
1988 if (REG_P (SUBREG_REG (out))
1989 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1990 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1991 GET_MODE (SUBREG_REG (out)),
1992 SUBREG_BYTE (out),
1993 GET_MODE (out));
1994 out = SUBREG_REG (out);
1996 while (GET_CODE (in) == SUBREG)
1998 if (REG_P (SUBREG_REG (in))
1999 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2000 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2001 GET_MODE (SUBREG_REG (in)),
2002 SUBREG_BYTE (in),
2003 GET_MODE (in));
2004 in = SUBREG_REG (in);
2007 /* Narrow down the reg class, the same way push_reload will;
2008 otherwise we might find a dummy now, but push_reload won't. */
2010 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2011 if (preferred_class != NO_REGS)
2012 rclass = (enum reg_class) preferred_class;
2015 /* See if OUT will do. */
2016 if (REG_P (out)
2017 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2019 unsigned int regno = REGNO (out) + out_offset;
2020 unsigned int nwords = hard_regno_nregs[regno][outmode];
2021 rtx saved_rtx;
2023 /* When we consider whether the insn uses OUT,
2024 ignore references within IN. They don't prevent us
2025 from copying IN into OUT, because those refs would
2026 move into the insn that reloads IN.
2028 However, we only ignore IN in its role as this reload.
2029 If the insn uses IN elsewhere and it contains OUT,
2030 that counts. We can't be sure it's the "same" operand
2031 so it might not go through this reload.
2033 We also need to avoid using OUT if it, or part of it, is a
2034 fixed register. Modifying such registers, even transiently,
2035 may have undefined effects on the machine, such as modifying
2036 the stack pointer. */
2037 saved_rtx = *inloc;
2038 *inloc = const0_rtx;
2040 if (regno < FIRST_PSEUDO_REGISTER
2041 && HARD_REGNO_MODE_OK (regno, outmode)
2042 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2043 PATTERN (this_insn), outloc))
2045 unsigned int i;
2047 for (i = 0; i < nwords; i++)
2048 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2049 regno + i)
2050 || fixed_regs[regno + i])
2051 break;
2053 if (i == nwords)
2055 if (REG_P (real_out))
2056 value = real_out;
2057 else
2058 value = gen_rtx_REG (outmode, regno);
2062 *inloc = saved_rtx;
2065 /* Consider using IN if OUT was not acceptable
2066 or if OUT dies in this insn (like the quotient in a divmod insn).
2067 We can't use IN unless it dies in this insn,
2068 which means we must know accurately which hard regs are live.
2069 Also, the result can't go in IN if IN is used within OUT,
2070 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2071 if (hard_regs_live_known
2072 && REG_P (in)
2073 && REGNO (in) < FIRST_PSEUDO_REGISTER
2074 && (value == 0
2075 || find_reg_note (this_insn, REG_UNUSED, real_out))
2076 && find_reg_note (this_insn, REG_DEAD, real_in)
2077 && !fixed_regs[REGNO (in)]
2078 && HARD_REGNO_MODE_OK (REGNO (in),
2079 /* The only case where out and real_out might
2080 have different modes is where real_out
2081 is a subreg, and in that case, out
2082 has a real mode. */
2083 (GET_MODE (out) != VOIDmode
2084 ? GET_MODE (out) : outmode))
2085 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2086 /* However only do this if we can be sure that this input
2087 operand doesn't correspond with an uninitialized pseudo.
2088 global can assign some hardreg to it that is the same as
2089 the one assigned to a different, also live pseudo (as it
2090 can ignore the conflict). We must never introduce writes
2091 to such hardregs, as they would clobber the other live
2092 pseudo. See PR 20973. */
2093 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2094 ORIGINAL_REGNO (in))
2095 /* Similarly, only do this if we can be sure that the death
2096 note is still valid. global can assign some hardreg to
2097 the pseudo referenced in the note and simultaneously a
2098 subword of this hardreg to a different, also live pseudo,
2099 because only another subword of the hardreg is actually
2100 used in the insn. This cannot happen if the pseudo has
2101 been assigned exactly one hardreg. See PR 33732. */
2102 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2104 unsigned int regno = REGNO (in) + in_offset;
2105 unsigned int nwords = hard_regno_nregs[regno][inmode];
2107 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2108 && ! hard_reg_set_here_p (regno, regno + nwords,
2109 PATTERN (this_insn))
2110 && (! earlyclobber
2111 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2112 PATTERN (this_insn), inloc)))
2114 unsigned int i;
2116 for (i = 0; i < nwords; i++)
2117 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2118 regno + i))
2119 break;
2121 if (i == nwords)
2123 /* If we were going to use OUT as the reload reg
2124 and changed our mind, it means OUT is a dummy that
2125 dies here. So don't bother copying value to it. */
2126 if (for_real >= 0 && value == real_out)
2127 rld[for_real].out = 0;
2128 if (REG_P (real_in))
2129 value = real_in;
2130 else
2131 value = gen_rtx_REG (inmode, regno);
2136 return value;
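/* Illustrative example, added for exposition and not part of the original
   source (register number and mode are made up): for a matched in-out
   operand pair where OUT is already the hard register (reg:SI 3) and IN
   is a pseudo living on the stack, find_dummy_reload returns (reg:SI 3)
   provided register 3 is in the required class, is not fixed, and is not
   referenced elsewhere in the insn: the input value can be loaded into
   register 3 before the insn and the result left there, so no separate
   spill register is needed.  */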
2139 /* This page contains subroutines used mainly for determining
2140 whether the IN or an OUT of a reload can serve as the
2141 reload register. */
2143 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2146 earlyclobber_operand_p (rtx x)
2148 int i;
2150 for (i = 0; i < n_earlyclobbers; i++)
2151 if (reload_earlyclobbers[i] == x)
2152 return 1;
2154 return 0;
2157 /* Return 1 if expression X alters a hard reg in the range
2158 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2159 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2160 X should be the body of an instruction. */
2162 static int
2163 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2165 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2167 rtx op0 = SET_DEST (x);
2169 while (GET_CODE (op0) == SUBREG)
2170 op0 = SUBREG_REG (op0);
2171 if (REG_P (op0))
2173 unsigned int r = REGNO (op0);
2175 /* See if this reg overlaps range under consideration. */
2176 if (r < end_regno
2177 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2178 return 1;
2181 else if (GET_CODE (x) == PARALLEL)
2183 int i = XVECLEN (x, 0) - 1;
2185 for (; i >= 0; i--)
2186 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2187 return 1;
2190 return 0;
2193 /* Return 1 if ADDR is a valid memory address for mode MODE
2194 in address space AS, and check that each pseudo reg has the
2195 proper kind of hard reg. */
2198 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2199 rtx addr, addr_space_t as)
2201 #ifdef GO_IF_LEGITIMATE_ADDRESS
2202 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2203 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2204 return 0;
2206 win:
2207 return 1;
2208 #else
2209 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2210 #endif
2213 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2214 if they are the same hard reg, and has special hacks for
2215 autoincrement and autodecrement.
2216 This is specifically intended for find_reloads to use
2217 in determining whether two operands match.
2218 X is the operand whose number is the lower of the two.
2220 The value is 2 if Y contains a pre-increment that matches
2221 a non-incrementing address in X. */
2223 /* ??? To be completely correct, we should arrange to pass
2224 for X the output operand and for Y the input operand.
2225 For now, we assume that the output operand has the lower number
2226 because that is natural in (SET output (... input ...)). */
2229 operands_match_p (rtx x, rtx y)
2231 int i;
2232 RTX_CODE code = GET_CODE (x);
2233 const char *fmt;
2234 int success_2;
2236 if (x == y)
2237 return 1;
2238 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2239 && (REG_P (y) || (GET_CODE (y) == SUBREG
2240 && REG_P (SUBREG_REG (y)))))
2242 int j;
2244 if (code == SUBREG)
2246 i = REGNO (SUBREG_REG (x));
2247 if (i >= FIRST_PSEUDO_REGISTER)
2248 goto slow;
2249 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2250 GET_MODE (SUBREG_REG (x)),
2251 SUBREG_BYTE (x),
2252 GET_MODE (x));
2254 else
2255 i = REGNO (x);
2257 if (GET_CODE (y) == SUBREG)
2259 j = REGNO (SUBREG_REG (y));
2260 if (j >= FIRST_PSEUDO_REGISTER)
2261 goto slow;
2262 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2263 GET_MODE (SUBREG_REG (y)),
2264 SUBREG_BYTE (y),
2265 GET_MODE (y));
2267 else
2268 j = REGNO (y);
2270 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2271 multiple hard register group of scalar integer registers, so that
2272 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2273 register. */
2274 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2275 && SCALAR_INT_MODE_P (GET_MODE (x))
2276 && i < FIRST_PSEUDO_REGISTER)
2277 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2278 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2279 && SCALAR_INT_MODE_P (GET_MODE (y))
2280 && j < FIRST_PSEUDO_REGISTER)
2281 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2283 return i == j;
2285 /* If two operands must match, because they are really a single
2286 operand of an assembler insn, then two postincrements are invalid
2287 because the assembler insn would increment only once.
2288 On the other hand, a postincrement matches ordinary indexing
2289 if the postincrement is the output operand. */
2290 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2291 return operands_match_p (XEXP (x, 0), y);
2292 /* Two preincrements are invalid
2293 because the assembler insn would increment only once.
2294 On the other hand, a preincrement matches ordinary indexing
2295 if the preincrement is the input operand.
2296 In this case, return 2, since some callers need to do special
2297 things when this happens. */
2298 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2299 || GET_CODE (y) == PRE_MODIFY)
2300 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2302 slow:
2304 /* Now we have disposed of all the cases in which different rtx codes
2305 can match. */
2306 if (code != GET_CODE (y))
2307 return 0;
2309 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2310 if (GET_MODE (x) != GET_MODE (y))
2311 return 0;
2313 /* MEMs referring to different address spaces are not equivalent. */
2314 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2315 return 0;
2317 switch (code)
2319 CASE_CONST_UNIQUE:
2320 return 0;
2322 case LABEL_REF:
2323 return XEXP (x, 0) == XEXP (y, 0);
2324 case SYMBOL_REF:
2325 return XSTR (x, 0) == XSTR (y, 0);
2327 default:
2328 break;
2331 /* Compare the elements. If any pair of corresponding elements
2332 fails to match, return 0 for the whole thing. */
2334 success_2 = 0;
2335 fmt = GET_RTX_FORMAT (code);
2336 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2338 int val, j;
2339 switch (fmt[i])
2341 case 'w':
2342 if (XWINT (x, i) != XWINT (y, i))
2343 return 0;
2344 break;
2346 case 'i':
2347 if (XINT (x, i) != XINT (y, i))
2348 return 0;
2349 break;
2351 case 'e':
2352 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2353 if (val == 0)
2354 return 0;
2355 /* If any subexpression returns 2,
2356 we should return 2 if we are successful. */
2357 if (val == 2)
2358 success_2 = 1;
2359 break;
2361 case '0':
2362 break;
2364 case 'E':
2365 if (XVECLEN (x, i) != XVECLEN (y, i))
2366 return 0;
2367 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2369 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2370 if (val == 0)
2371 return 0;
2372 if (val == 2)
2373 success_2 = 1;
2375 break;
2377 /* It is believed that rtx's at this level will never
2378 contain anything but integers and other rtx's,
2379 except within LABEL_REFs and SYMBOL_REFs. */
2380 default:
2381 gcc_unreachable ();
2384 return 1 + success_2;
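/* Illustrative example, added for exposition and not part of the original
   source (register number and mode are made up): with X the output
   operand and Y the input operand,

       X = (mem:SI (post_inc:SI (reg:SI 1)))   Y = (mem:SI (reg:SI 1))

   returns 1, because a postincrement in the output matches ordinary
   indexing in the input, while

       X = (mem:SI (reg:SI 1))   Y = (mem:SI (pre_inc:SI (reg:SI 1)))

   returns 2, telling the caller that the match involved a pre-increment
   on the input side.  */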
2387 /* Describe the range of registers or memory referenced by X.
2388 If X is a register, set REG_FLAG and put the first register
2389 number into START and the last plus one into END.
2390 If X is a memory reference, put a base address into BASE
2391 and a range of integer offsets into START and END.
2392 If X is pushing on the stack, we can assume it causes no trouble,
2393 so we set the SAFE field. */
2395 static struct decomposition
2396 decompose (rtx x)
2398 struct decomposition val;
2399 int all_const = 0;
2401 memset (&val, 0, sizeof (val));
2403 switch (GET_CODE (x))
2405 case MEM:
2407 rtx base = NULL_RTX, offset = 0;
2408 rtx addr = XEXP (x, 0);
2410 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2411 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2413 val.base = XEXP (addr, 0);
2414 val.start = -GET_MODE_SIZE (GET_MODE (x));
2415 val.end = GET_MODE_SIZE (GET_MODE (x));
2416 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2417 return val;
2420 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2422 if (GET_CODE (XEXP (addr, 1)) == PLUS
2423 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2424 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2426 val.base = XEXP (addr, 0);
2427 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2428 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2429 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2430 return val;
2434 if (GET_CODE (addr) == CONST)
2436 addr = XEXP (addr, 0);
2437 all_const = 1;
2439 if (GET_CODE (addr) == PLUS)
2441 if (CONSTANT_P (XEXP (addr, 0)))
2443 base = XEXP (addr, 1);
2444 offset = XEXP (addr, 0);
2446 else if (CONSTANT_P (XEXP (addr, 1)))
2448 base = XEXP (addr, 0);
2449 offset = XEXP (addr, 1);
2453 if (offset == 0)
2455 base = addr;
2456 offset = const0_rtx;
2458 if (GET_CODE (offset) == CONST)
2459 offset = XEXP (offset, 0);
2460 if (GET_CODE (offset) == PLUS)
2462 if (CONST_INT_P (XEXP (offset, 0)))
2464 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2465 offset = XEXP (offset, 0);
2467 else if (CONST_INT_P (XEXP (offset, 1)))
2469 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2470 offset = XEXP (offset, 1);
2472 else
2474 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2475 offset = const0_rtx;
2478 else if (!CONST_INT_P (offset))
2480 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2481 offset = const0_rtx;
2484 if (all_const && GET_CODE (base) == PLUS)
2485 base = gen_rtx_CONST (GET_MODE (base), base);
2487 gcc_assert (CONST_INT_P (offset));
2489 val.start = INTVAL (offset);
2490 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2491 val.base = base;
2493 break;
2495 case REG:
2496 val.reg_flag = 1;
2497 val.start = true_regnum (x);
2498 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2500 /* A pseudo with no hard reg. */
2501 val.start = REGNO (x);
2502 val.end = val.start + 1;
2504 else
2505 /* A hard reg. */
2506 val.end = end_hard_regno (GET_MODE (x), val.start);
2507 break;
2509 case SUBREG:
2510 if (!REG_P (SUBREG_REG (x)))
2511 /* This could be more precise, but it's good enough. */
2512 return decompose (SUBREG_REG (x));
2513 val.reg_flag = 1;
2514 val.start = true_regnum (x);
2515 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2516 return decompose (SUBREG_REG (x));
2517 else
2518 /* A hard reg. */
2519 val.end = val.start + subreg_nregs (x);
2520 break;
2522 case SCRATCH:
2523 /* This hasn't been assigned yet, so it can't conflict yet. */
2524 val.safe = 1;
2525 break;
2527 default:
2528 gcc_assert (CONSTANT_P (x));
2529 val.safe = 1;
2530 break;
2532 return val;
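/* Illustrative examples, added for exposition and not part of the
   original source, of what decompose computes (register numbers, modes
   and a 4-byte SImode are assumed purely for illustration):

     decompose ((mem:SI (plus:SI (reg:SI fp) (const_int 8))))
       => base = (reg:SI fp), start = 8, end = 12, reg_flag = 0

     decompose ((mem:SI (pre_dec:SI (reg:SI sp))))
       => base = (reg:SI sp), start = -4, end = 4,
          safe = 1 because the base is the stack pointer

     decompose ((reg:DI 100))        -- a pseudo with no hard register
       => reg_flag = 1, start = 100, end = 101  */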
2535 /* Return 1 if altering Y will not modify the value of X.
2536 Y is also described by YDATA, which should be decompose (Y). */
2538 static int
2539 immune_p (rtx x, rtx y, struct decomposition ydata)
2541 struct decomposition xdata;
2543 if (ydata.reg_flag)
2544 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2545 if (ydata.safe)
2546 return 1;
2548 gcc_assert (MEM_P (y));
2549 /* If Y is memory and X is not, Y can't affect X. */
2550 if (!MEM_P (x))
2551 return 1;
2553 xdata = decompose (x);
2555 if (! rtx_equal_p (xdata.base, ydata.base))
2557 /* If bases are distinct symbolic constants, there is no overlap. */
2558 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2559 return 1;
2560 /* Constants and stack slots never overlap. */
2561 if (CONSTANT_P (xdata.base)
2562 && (ydata.base == frame_pointer_rtx
2563 || ydata.base == hard_frame_pointer_rtx
2564 || ydata.base == stack_pointer_rtx))
2565 return 1;
2566 if (CONSTANT_P (ydata.base)
2567 && (xdata.base == frame_pointer_rtx
2568 || xdata.base == hard_frame_pointer_rtx
2569 || xdata.base == stack_pointer_rtx))
2570 return 1;
2571 /* If either base is variable, we don't know anything. */
2572 return 0;
2575 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
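/* Illustrative sketch, added for exposition and not part of the original
   source: once both memory references are decomposed relative to the
   same base, the final test above is the standard disjointness check on
   half-open byte ranges.  A standalone illustration with made-up
   offsets:  */
#if 0
static int
ranges_disjoint (int xstart, int xend, int ystart, int yend)
{
  /* Same test as the return statement of immune_p above.  */
  return xstart >= yend || ystart >= xend;
}
/* ranges_disjoint (8, 12, 12, 16) == 1   adjacent 4-byte slots, immune
   ranges_disjoint (8, 12, 10, 14) == 0   overlapping slots, not immune  */
#endif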
2578 /* Similar, but calls decompose. */
2581 safe_from_earlyclobber (rtx op, rtx clobber)
2583 struct decomposition early_data;
2585 early_data = decompose (clobber);
2586 return immune_p (op, clobber, early_data);
2589 /* Main entry point of this file: search the body of INSN
2590 for values that need reloading and record them with push_reload.
2591 REPLACE nonzero means record also where the values occur
2592 so that subst_reloads can be used.
2594 IND_LEVELS says how many levels of indirection are supported by this
2595 machine; a value of zero means that a memory reference is not a valid
2596 memory address.
2598 LIVE_KNOWN says we have valid information about which hard
2599 regs are live at each point in the program; this is true when
2600 we are called from global_alloc but false when stupid register
2601 allocation has been done.
2603 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2604 which is nonnegative if the reg has been commandeered for reloading into.
2605 It is copied into STATIC_RELOAD_REG_P and referenced from there
2606 by various subroutines.
2608 Return TRUE if some operands need to be changed, because of swapping
2609 commutative operands, reg_equiv_address substitution, or whatever. */
2612 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2613 short *reload_reg_p)
2615 int insn_code_number;
2616 int i, j;
2617 int noperands;
2618 /* These start out as the constraints for the insn
2619 and they are chewed up as we consider alternatives. */
2620 const char *constraints[MAX_RECOG_OPERANDS];
2621 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2622 a register. */
2623 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2624 char pref_or_nothing[MAX_RECOG_OPERANDS];
2625 /* Nonzero for a MEM operand whose entire address needs a reload.
2626 May be -1 to indicate the entire address may or may not need a reload. */
2627 int address_reloaded[MAX_RECOG_OPERANDS];
2628 /* Nonzero for an address operand that needs to be completely reloaded.
2629 May be -1 to indicate the entire operand may or may not need a reload. */
2630 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2631 /* Value of enum reload_type to use for operand. */
2632 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2633 /* Value of enum reload_type to use within address of operand. */
2634 enum reload_type address_type[MAX_RECOG_OPERANDS];
2635 /* Save the usage of each operand. */
2636 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2637 int no_input_reloads = 0, no_output_reloads = 0;
2638 int n_alternatives;
2639 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2640 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2641 char this_alternative_win[MAX_RECOG_OPERANDS];
2642 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2643 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2644 int this_alternative_matches[MAX_RECOG_OPERANDS];
2645 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2646 int this_alternative_number;
2647 int goal_alternative_number = 0;
2648 int operand_reloadnum[MAX_RECOG_OPERANDS];
2649 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2650 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2651 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2652 char goal_alternative_win[MAX_RECOG_OPERANDS];
2653 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2654 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2655 int goal_alternative_swapped;
2656 int best;
2657 int commutative;
2658 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2659 rtx substed_operand[MAX_RECOG_OPERANDS];
2660 rtx body = PATTERN (insn);
2661 rtx set = single_set (insn);
2662 int goal_earlyclobber = 0, this_earlyclobber;
2663 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2664 int retval = 0;
2666 this_insn = insn;
2667 n_reloads = 0;
2668 n_replacements = 0;
2669 n_earlyclobbers = 0;
2670 replace_reloads = replace;
2671 hard_regs_live_known = live_known;
2672 static_reload_reg_p = reload_reg_p;
2674 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2675 neither are insns that SET cc0. Insns that use CC0 are not allowed
2676 to have any input reloads. */
2677 if (JUMP_P (insn) || CALL_P (insn))
2678 no_output_reloads = 1;
2680 #ifdef HAVE_cc0
2681 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2682 no_input_reloads = 1;
2683 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2684 no_output_reloads = 1;
2685 #endif
2687 #ifdef SECONDARY_MEMORY_NEEDED
2688 /* The eliminated forms of any secondary memory locations are per-insn, so
2689 clear them out here. */
2691 if (secondary_memlocs_elim_used)
2693 memset (secondary_memlocs_elim, 0,
2694 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2695 secondary_memlocs_elim_used = 0;
2697 #endif
2699 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2700 is cheap to move between them. If it is not, there may not be an insn
2701 to do the copy, so we may need a reload. */
2702 if (GET_CODE (body) == SET
2703 && REG_P (SET_DEST (body))
2704 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2705 && REG_P (SET_SRC (body))
2706 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2707 && register_move_cost (GET_MODE (SET_SRC (body)),
2708 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2709 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2710 return 0;
2712 extract_insn (insn);
2714 noperands = reload_n_operands = recog_data.n_operands;
2715 n_alternatives = recog_data.n_alternatives;
2717 /* Just return "no reloads" if insn has no operands with constraints. */
2718 if (noperands == 0 || n_alternatives == 0)
2719 return 0;
2721 insn_code_number = INSN_CODE (insn);
2722 this_insn_is_asm = insn_code_number < 0;
2724 memcpy (operand_mode, recog_data.operand_mode,
2725 noperands * sizeof (enum machine_mode));
2726 memcpy (constraints, recog_data.constraints,
2727 noperands * sizeof (const char *));
2729 commutative = -1;
2731 /* If we will need to know, later, whether some pair of operands
2732 are the same, we must compare them now and save the result.
2733 Reloading the base and index registers will clobber them
2734 and afterward they will fail to match. */
2736 for (i = 0; i < noperands; i++)
2738 const char *p;
2739 int c;
2740 char *end;
2742 substed_operand[i] = recog_data.operand[i];
2743 p = constraints[i];
2745 modified[i] = RELOAD_READ;
2747 /* Scan this operand's constraint to see if it is an output operand,
2748 an in-out operand, is commutative, or should match another. */
2750 while ((c = *p))
2752 p += CONSTRAINT_LEN (c, p);
2753 switch (c)
2755 case '=':
2756 modified[i] = RELOAD_WRITE;
2757 break;
2758 case '+':
2759 modified[i] = RELOAD_READ_WRITE;
2760 break;
2761 case '%':
2763 /* The last operand should not be marked commutative. */
2764 gcc_assert (i != noperands - 1);
2766 /* We currently only support one commutative pair of
2767 operands. Some existing asm code currently uses more
2768 than one pair. Previously, that would usually work,
2769 but sometimes it would crash the compiler. We
2770 continue supporting that case as well as we can by
2771 silently ignoring all but the first pair. In the
2772 future we may handle it correctly. */
2773 if (commutative < 0)
2774 commutative = i;
2775 else
2776 gcc_assert (this_insn_is_asm);
2778 break;
2779 /* Use of ISDIGIT is tempting here, but it may get expensive because
2780 of locale support we don't want. */
2781 case '0': case '1': case '2': case '3': case '4':
2782 case '5': case '6': case '7': case '8': case '9':
2784 c = strtoul (p - 1, &end, 10);
2785 p = end;
2787 operands_match[c][i]
2788 = operands_match_p (recog_data.operand[c],
2789 recog_data.operand[i]);
2791 /* An operand may not match itself. */
2792 gcc_assert (c != i);
2794 /* If C can be commuted with C+1, and C might need to match I,
2795 then C+1 might also need to match I. */
2796 if (commutative >= 0)
2798 if (c == commutative || c == commutative + 1)
2800 int other = c + (c == commutative ? 1 : -1);
2801 operands_match[other][i]
2802 = operands_match_p (recog_data.operand[other],
2803 recog_data.operand[i]);
2805 if (i == commutative || i == commutative + 1)
2807 int other = i + (i == commutative ? 1 : -1);
2808 operands_match[c][other]
2809 = operands_match_p (recog_data.operand[c],
2810 recog_data.operand[other]);
2812 /* Note that C is supposed to be less than I.
2813 No need to consider altering both C and I because in
2814 that case we would alter one into the other. */
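/* Illustrative example, added for exposition and not part of the original
   source (the constraint letters are only meant to show the general
   scheme): for a commutative add pattern whose three operands use the
   constraints "=r", "%0" and "r", the '%' marks operands 1 and 2 as the
   commutative pair and the '0' asks operand 1 to match operand 0, so the
   loop above precomputes operands_match for operand 0 against operand 1
   and, because of the commutativity, against operand 2 as well.  */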
2821 /* Examine each operand that is a memory reference or memory address
2822 and reload parts of the addresses into index registers.
2823 Also here any references to pseudo regs that didn't get hard regs
2824 but are equivalent to constants get replaced in the insn itself
2825 with those constants. Nobody will ever see them again.
2827 Finally, set up the preferred classes of each operand. */
2829 for (i = 0; i < noperands; i++)
2831 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2833 address_reloaded[i] = 0;
2834 address_operand_reloaded[i] = 0;
2835 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2836 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2837 : RELOAD_OTHER);
2838 address_type[i]
2839 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2840 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2841 : RELOAD_OTHER);
2843 if (*constraints[i] == 0)
2844 /* Ignore things like match_operator operands. */
2846 else if (insn_extra_address_constraint
2847 (lookup_constraint (constraints[i])))
2849 address_operand_reloaded[i]
2850 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2851 recog_data.operand[i],
2852 recog_data.operand_loc[i],
2853 i, operand_type[i], ind_levels, insn);
2855 /* If we now have a simple operand where we used to have a
2856 PLUS or MULT, re-recognize and try again. */
2857 if ((OBJECT_P (*recog_data.operand_loc[i])
2858 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2859 && (GET_CODE (recog_data.operand[i]) == MULT
2860 || GET_CODE (recog_data.operand[i]) == PLUS))
2862 INSN_CODE (insn) = -1;
2863 retval = find_reloads (insn, replace, ind_levels, live_known,
2864 reload_reg_p);
2865 return retval;
2868 recog_data.operand[i] = *recog_data.operand_loc[i];
2869 substed_operand[i] = recog_data.operand[i];
2871 /* Address operands are reloaded in their existing mode,
2872 no matter what is specified in the machine description. */
2873 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2875 /* If the address is a single CONST_INT, pick the address mode
2876 instead; otherwise we will not know later in which mode
2877 the reload should be performed. */
2878 if (operand_mode[i] == VOIDmode)
2879 operand_mode[i] = Pmode;
2882 else if (code == MEM)
2884 address_reloaded[i]
2885 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2886 recog_data.operand_loc[i],
2887 XEXP (recog_data.operand[i], 0),
2888 &XEXP (recog_data.operand[i], 0),
2889 i, address_type[i], ind_levels, insn);
2890 recog_data.operand[i] = *recog_data.operand_loc[i];
2891 substed_operand[i] = recog_data.operand[i];
2893 else if (code == SUBREG)
2895 rtx reg = SUBREG_REG (recog_data.operand[i]);
2896 rtx op
2897 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2898 ind_levels,
2899 set != 0
2900 && &SET_DEST (set) == recog_data.operand_loc[i],
2901 insn,
2902 &address_reloaded[i]);
2904 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2905 that didn't get a hard register, emit a USE with a REG_EQUAL
2906 note in front so that we might inherit a previous, possibly
2907 wider reload. */
2909 if (replace
2910 && MEM_P (op)
2911 && REG_P (reg)
2912 && (GET_MODE_SIZE (GET_MODE (reg))
2913 >= GET_MODE_SIZE (GET_MODE (op)))
2914 && reg_equiv_constant (REGNO (reg)) == 0)
2915 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2916 insn),
2917 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2919 substed_operand[i] = recog_data.operand[i] = op;
2921 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2922 /* We can get a PLUS as an "operand" as a result of register
2923 elimination. See eliminate_regs and gen_reload. We handle
2924 a unary operator by reloading the operand. */
2925 substed_operand[i] = recog_data.operand[i]
2926 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2927 ind_levels, 0, insn,
2928 &address_reloaded[i]);
2929 else if (code == REG)
2931 /* This is equivalent to calling find_reloads_toplev.
2932 The code is duplicated for speed.
2933 When we find a pseudo always equivalent to a constant,
2934 we replace it by the constant. We must be sure, however,
2935 that we don't try to replace it in the insn in which it
2936 is being set. */
2937 int regno = REGNO (recog_data.operand[i]);
2938 if (reg_equiv_constant (regno) != 0
2939 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2941 /* Record the existing mode so that the check whether constants
2942 are allowed will work when operand_mode isn't specified. */
2944 if (operand_mode[i] == VOIDmode)
2945 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2947 substed_operand[i] = recog_data.operand[i]
2948 = reg_equiv_constant (regno);
2950 if (reg_equiv_memory_loc (regno) != 0
2951 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2952 /* We need not give a valid is_set_dest argument since the case
2953 of a constant equivalence was checked above. */
2954 substed_operand[i] = recog_data.operand[i]
2955 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2956 ind_levels, 0, insn,
2957 &address_reloaded[i]);
2959 /* If the operand is still a register (we didn't replace it with an
2960 equivalent), get the preferred class to reload it into. */
2961 code = GET_CODE (recog_data.operand[i]);
2962 preferred_class[i]
2963 = ((code == REG && REGNO (recog_data.operand[i])
2964 >= FIRST_PSEUDO_REGISTER)
2965 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2966 : NO_REGS);
2967 pref_or_nothing[i]
2968 = (code == REG
2969 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2970 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2973 /* If this is simply a copy from operand 1 to operand 0, merge the
2974 preferred classes for the operands. */
2975 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2976 && recog_data.operand[1] == SET_SRC (set))
2978 preferred_class[0] = preferred_class[1]
2979 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2980 pref_or_nothing[0] |= pref_or_nothing[1];
2981 pref_or_nothing[1] |= pref_or_nothing[0];
2984 /* Now see what we need for pseudo-regs that didn't get hard regs
2985 or got the wrong kind of hard reg. For this, we must consider
2986 all the operands together against the register constraints. */
2988 best = MAX_RECOG_OPERANDS * 2 + 600;
2990 goal_alternative_swapped = 0;
2992 /* The constraints are made of several alternatives.
2993 Each operand's constraint looks like foo,bar,... with commas
2994 separating the alternatives. The first alternatives for all
2995 operands go together, the second alternatives go together, etc.
2997 First loop over alternatives. */
2999 for (this_alternative_number = 0;
3000 this_alternative_number < n_alternatives;
3001 this_alternative_number++)
3003 int swapped;
3005 if (!TEST_BIT (recog_data.enabled_alternatives, this_alternative_number))
3007 int i;
3009 for (i = 0; i < recog_data.n_operands; i++)
3010 constraints[i] = skip_alternative (constraints[i]);
3012 continue;
3015 /* If insn is commutative (it's safe to exchange a certain pair
3016 of operands) then we need to try each alternative twice, the
3017 second time matching those two operands as if we had
3018 exchanged them. To do this, really exchange them in
3019 operands. */
3020 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3022 /* Loop over operands for one constraint alternative. */
3023 /* LOSERS counts those that don't fit this alternative
3024 and would require loading. */
3025 int losers = 0;
3026 /* BAD is set to 1 if some operand can't fit this alternative
3027 even after reloading. */
3028 int bad = 0;
3029 /* REJECT is a count of how undesirable this alternative says it is
3030 if any reloading is required. If the alternative matches exactly
3031 then REJECT is ignored, but otherwise it gets this much
3032 counted against it in addition to the reloading needed. Each
3033 ? counts three times here since we want the disparaging caused by
3034 a bad register class to only count 1/3 as much. */
3035 int reject = 0;
3037 if (swapped)
3039 enum reg_class tclass;
3040 int t;
3042 recog_data.operand[commutative] = substed_operand[commutative + 1];
3043 recog_data.operand[commutative + 1] = substed_operand[commutative];
3044 /* Swap the duplicates too. */
3045 for (i = 0; i < recog_data.n_dups; i++)
3046 if (recog_data.dup_num[i] == commutative
3047 || recog_data.dup_num[i] == commutative + 1)
3048 *recog_data.dup_loc[i]
3049 = recog_data.operand[(int) recog_data.dup_num[i]];
3051 tclass = preferred_class[commutative];
3052 preferred_class[commutative] = preferred_class[commutative + 1];
3053 preferred_class[commutative + 1] = tclass;
3055 t = pref_or_nothing[commutative];
3056 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3057 pref_or_nothing[commutative + 1] = t;
3059 t = address_reloaded[commutative];
3060 address_reloaded[commutative] = address_reloaded[commutative + 1];
3061 address_reloaded[commutative + 1] = t;
3064 this_earlyclobber = 0;
3066 for (i = 0; i < noperands; i++)
3068 const char *p = constraints[i];
3069 char *end;
3070 int len;
3071 int win = 0;
3072 int did_match = 0;
3073 /* 0 => this operand can be reloaded somehow for this alternative. */
3074 int badop = 1;
3075 /* 0 => this operand can be reloaded if the alternative allows regs. */
3076 int winreg = 0;
3077 int c;
3078 int m;
3079 rtx operand = recog_data.operand[i];
3080 int offset = 0;
3081 /* Nonzero means this is a MEM that must be reloaded into a reg
3082 regardless of what the constraint says. */
3083 int force_reload = 0;
3084 int offmemok = 0;
3085 /* Nonzero if a constant forced into memory would be OK for this
3086 operand. */
3087 int constmemok = 0;
3088 int earlyclobber = 0;
3089 enum constraint_num cn;
3090 enum reg_class cl;
3092 /* If the predicate accepts a unary operator, it means that
3093 we need to reload the operand, but do not do this for
3094 match_operator and friends. */
3095 if (UNARY_P (operand) && *p != 0)
3096 operand = XEXP (operand, 0);
3098 /* If the operand is a SUBREG, extract
3099 the REG or MEM (or maybe even a constant) within.
3100 (Constants can occur as a result of reg_equiv_constant.) */
3102 while (GET_CODE (operand) == SUBREG)
3104 /* Offset only matters when operand is a REG and
3105 it is a hard reg. This is because it is passed
3106 to reg_fits_class_p if it is a REG, and reg_fits_class_p
3107 returns 0 for all pseudos. */
3108 if (REG_P (SUBREG_REG (operand))
3109 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3111 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3112 GET_MODE (SUBREG_REG (operand)),
3113 SUBREG_BYTE (operand),
3114 GET_MODE (operand)) < 0)
3115 force_reload = 1;
3116 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3117 GET_MODE (SUBREG_REG (operand)),
3118 SUBREG_BYTE (operand),
3119 GET_MODE (operand));
3121 operand = SUBREG_REG (operand);
3122 /* Force reload if this is a constant or PLUS or if there may
3123 be a problem accessing OPERAND in the outer mode. */
3124 if (CONSTANT_P (operand)
3125 || GET_CODE (operand) == PLUS
3126 /* We must force a reload of paradoxical SUBREGs
3127 of a MEM because the alignment of the inner value
3128 may not be enough to do the outer reference. On
3129 big-endian machines, it may also reference outside
3130 the object.
3132 On machines that extend byte operations and we have a
3133 SUBREG where both the inner and outer modes are no wider
3134 than a word and the inner mode is narrower, is integral,
3135 and gets extended when loaded from memory, combine.c has
3136 made assumptions about the behavior of the machine in such
3137 register access. If the data is, in fact, in memory we
3138 must always load using the size assumed to be in the
3139 register and let the insn do the different-sized
3140 accesses.
3142 This is doubly true if WORD_REGISTER_OPERATIONS. In
3143 this case eliminate_regs has left non-paradoxical
3144 subregs for push_reload to see. Make sure it does
3145 by forcing the reload.
3147 ??? When is it right at this stage to have a subreg
3148 of a mem that is _not_ to be handled specially? IMO
3149 those should have been reduced to just a mem. */
3150 || ((MEM_P (operand)
3151 || (REG_P (operand)
3152 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3153 #ifndef WORD_REGISTER_OPERATIONS
3154 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3155 < BIGGEST_ALIGNMENT)
3156 && (GET_MODE_SIZE (operand_mode[i])
3157 > GET_MODE_SIZE (GET_MODE (operand))))
3158 || BYTES_BIG_ENDIAN
3159 #ifdef LOAD_EXTEND_OP
3160 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3161 && (GET_MODE_SIZE (GET_MODE (operand))
3162 <= UNITS_PER_WORD)
3163 && (GET_MODE_SIZE (operand_mode[i])
3164 > GET_MODE_SIZE (GET_MODE (operand)))
3165 && INTEGRAL_MODE_P (GET_MODE (operand))
3166 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3167 #endif
3169 #endif
3172 force_reload = 1;
3175 this_alternative[i] = NO_REGS;
3176 this_alternative_win[i] = 0;
3177 this_alternative_match_win[i] = 0;
3178 this_alternative_offmemok[i] = 0;
3179 this_alternative_earlyclobber[i] = 0;
3180 this_alternative_matches[i] = -1;
3182 /* An empty constraint or empty alternative
3183 allows anything which matched the pattern. */
3184 if (*p == 0 || *p == ',')
3185 win = 1, badop = 0;
3187 /* Scan this alternative's specs for this operand;
3188 set WIN if the operand fits any letter in this alternative.
3189 Otherwise, clear BADOP if this operand could
3190 fit some letter after reloads,
3191 or set WINREG if this operand could fit after reloads
3192 provided the constraint allows some registers. */
3195 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3197 case '\0':
3198 len = 0;
3199 break;
3200 case ',':
3201 c = '\0';
3202 break;
3204 case '?':
3205 reject += 6;
3206 break;
3208 case '!':
3209 reject = 600;
3210 break;
3212 case '#':
3213 /* Ignore rest of this alternative as far as
3214 reloading is concerned. */
3216 p++;
3217 while (*p && *p != ',');
3218 len = 0;
3219 break;
3221 case '0': case '1': case '2': case '3': case '4':
3222 case '5': case '6': case '7': case '8': case '9':
3223 m = strtoul (p, &end, 10);
3224 p = end;
3225 len = 0;
3227 this_alternative_matches[i] = m;
3228 /* We are supposed to match a previous operand.
3229 If we do, we win if that one did.
3230 If we do not, count both of the operands as losers.
3231 (This is too conservative, since most of the time
3232 only a single reload insn will be needed to make
3233 the two operands win. As a result, this alternative
3234 may be rejected when it is actually desirable.) */
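/* Editor's note (illustrative, not from the original source): the
   index juggling in the condition below maps an operand onto its
   commutative partner when the operands are being tried swapped.
   For example, with commutative == 1, operand 1 maps to
   2 * 1 + 1 - 1 == 2 and operand 2 maps to 2 * 1 + 1 - 2 == 1, while
   operands outside the commutative pair keep their own numbers.  */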
3235 if ((swapped && (m != commutative || i != commutative + 1))
3236 /* If we are matching as if two operands were swapped,
3237 also pretend that operands_match had been computed
3238 with swapped.
3239 But if I is the second of those and C is the first,
3240 don't exchange them, because operands_match is valid
3241 only on one side of its diagonal. */
3242 ? (operands_match
3243 [(m == commutative || m == commutative + 1)
3244 ? 2 * commutative + 1 - m : m]
3245 [(i == commutative || i == commutative + 1)
3246 ? 2 * commutative + 1 - i : i])
3247 : operands_match[m][i])
3249 /* If we are matching a non-offsettable address where an
3250 offsettable address was expected, then we must reject
3251 this combination, because we can't reload it. */
3252 if (this_alternative_offmemok[m]
3253 && MEM_P (recog_data.operand[m])
3254 && this_alternative[m] == NO_REGS
3255 && ! this_alternative_win[m])
3256 bad = 1;
3258 did_match = this_alternative_win[m];
3260 else
3262 /* Operands don't match. */
3263 rtx value;
3264 int loc1, loc2;
3265 /* Retroactively mark the operand we had to match
3266 as a loser, if it wasn't already. */
3267 if (this_alternative_win[m])
3268 losers++;
3269 this_alternative_win[m] = 0;
3270 if (this_alternative[m] == NO_REGS)
3271 bad = 1;
3272 /* But count the pair only once in the total badness of
3273 this alternative, if the pair can be a dummy reload.
3274 The pointers in operand_loc are not swapped; swap
3275 them by hand if necessary. */
3276 if (swapped && i == commutative)
3277 loc1 = commutative + 1;
3278 else if (swapped && i == commutative + 1)
3279 loc1 = commutative;
3280 else
3281 loc1 = i;
3282 if (swapped && m == commutative)
3283 loc2 = commutative + 1;
3284 else if (swapped && m == commutative + 1)
3285 loc2 = commutative;
3286 else
3287 loc2 = m;
3288 value
3289 = find_dummy_reload (recog_data.operand[i],
3290 recog_data.operand[m],
3291 recog_data.operand_loc[loc1],
3292 recog_data.operand_loc[loc2],
3293 operand_mode[i], operand_mode[m],
3294 this_alternative[m], -1,
3295 this_alternative_earlyclobber[m]);
3297 if (value != 0)
3298 losers--;
3300 /* This can be fixed with reloads if the operand
3301 we are supposed to match can be fixed with reloads. */
3302 badop = 0;
3303 this_alternative[i] = this_alternative[m];
3305 /* If we have to reload this operand and some previous
3306 operand also had to match the same thing as this
3307 operand, we don't know how to do that. So reject this
3308 alternative. */
3309 if (! did_match || force_reload)
3310 for (j = 0; j < i; j++)
3311 if (this_alternative_matches[j]
3312 == this_alternative_matches[i])
3314 badop = 1;
3315 break;
3317 break;
3319 case 'p':
3320 /* All necessary reloads for an address_operand
3321 were handled in find_reloads_address. */
3322 this_alternative[i]
3323 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3324 ADDRESS, SCRATCH);
3325 win = 1;
3326 badop = 0;
3327 break;
3329 case TARGET_MEM_CONSTRAINT:
3330 if (force_reload)
3331 break;
3332 if (MEM_P (operand)
3333 || (REG_P (operand)
3334 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3335 && reg_renumber[REGNO (operand)] < 0))
3336 win = 1;
3337 if (CONST_POOL_OK_P (operand_mode[i], operand))
3338 badop = 0;
3339 constmemok = 1;
3340 break;
3342 case '<':
3343 if (MEM_P (operand)
3344 && ! address_reloaded[i]
3345 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3346 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3347 win = 1;
3348 break;
3350 case '>':
3351 if (MEM_P (operand)
3352 && ! address_reloaded[i]
3353 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3354 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3355 win = 1;
3356 break;
3358 /* Memory operand whose address is not offsettable. */
3359 case 'V':
3360 if (force_reload)
3361 break;
3362 if (MEM_P (operand)
3363 && ! (ind_levels ? offsettable_memref_p (operand)
3364 : offsettable_nonstrict_memref_p (operand))
3365 /* Certain mem addresses will become offsettable
3366 after they themselves are reloaded. This is important;
3367 we don't want our own handling of unoffsettables
3368 to override the handling of reg_equiv_address. */
3369 && !(REG_P (XEXP (operand, 0))
3370 && (ind_levels == 0
3371 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3372 win = 1;
3373 break;
3375 /* Memory operand whose address is offsettable. */
3376 case 'o':
3377 if (force_reload)
3378 break;
3379 if ((MEM_P (operand)
3380 /* If IND_LEVELS, find_reloads_address won't reload a
3381 pseudo that didn't get a hard reg, so we have to
3382 reject that case. */
3383 && ((ind_levels ? offsettable_memref_p (operand)
3384 : offsettable_nonstrict_memref_p (operand))
3385 /* A reloaded address is offsettable because it is now
3386 just a simple register indirect. */
3387 || address_reloaded[i] == 1))
3388 || (REG_P (operand)
3389 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3390 && reg_renumber[REGNO (operand)] < 0
3391 /* If reg_equiv_address is nonzero, we will be
3392 loading it into a register; hence it will be
3393 offsettable, but we cannot say that reg_equiv_mem
3394 is offsettable without checking. */
3395 && ((reg_equiv_mem (REGNO (operand)) != 0
3396 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3397 || (reg_equiv_address (REGNO (operand)) != 0))))
3398 win = 1;
3399 if (CONST_POOL_OK_P (operand_mode[i], operand)
3400 || MEM_P (operand))
3401 badop = 0;
3402 constmemok = 1;
3403 offmemok = 1;
3404 break;
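/* Editor's example (illustrative, not part of the original code):
   (mem:SI (reg:SI 3)) is offsettable when the target also accepts
   (mem:SI (plus:SI (reg:SI 3) (const_int 4))), so it satisfies 'o'
   above, whereas (mem:SI (post_inc:SI (reg:SI 3))) cannot have a
   constant added to its address and matches 'V' instead.  */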
3406 case '&':
3407 /* Output operand that is stored before the need for the
3408 input operands (and their index registers) is over. */
3409 earlyclobber = 1, this_earlyclobber = 1;
3410 break;
3412 case 'X':
3413 force_reload = 0;
3414 win = 1;
3415 break;
3417 case 'g':
3418 if (! force_reload
3419 /* A PLUS is never a valid operand, but reload can make
3420 it from a register when eliminating registers. */
3421 && GET_CODE (operand) != PLUS
3422 /* A SCRATCH is not a valid operand. */
3423 && GET_CODE (operand) != SCRATCH
3424 && (! CONSTANT_P (operand)
3425 || ! flag_pic
3426 || LEGITIMATE_PIC_OPERAND_P (operand))
3427 && (GENERAL_REGS == ALL_REGS
3428 || !REG_P (operand)
3429 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3430 && reg_renumber[REGNO (operand)] < 0)))
3431 win = 1;
3432 cl = GENERAL_REGS;
3433 goto reg;
3435 default:
3436 cn = lookup_constraint (p);
3437 switch (get_constraint_type (cn))
3439 case CT_REGISTER:
3440 cl = reg_class_for_constraint (cn);
3441 if (cl != NO_REGS)
3442 goto reg;
3443 break;
3445 case CT_CONST_INT:
3446 if (CONST_INT_P (operand)
3447 && (insn_const_int_ok_for_constraint
3448 (INTVAL (operand), cn)))
3449 win = true;
3450 break;
3452 case CT_MEMORY:
3453 if (force_reload)
3454 break;
3455 if (constraint_satisfied_p (operand, cn))
3456 win = 1;
3457 /* If the address was already reloaded,
3458 we win as well. */
3459 else if (MEM_P (operand) && address_reloaded[i] == 1)
3460 win = 1;
3461 /* Likewise if the address will be reloaded because
3462 reg_equiv_address is nonzero. For reg_equiv_mem
3463 we have to check. */
3464 else if (REG_P (operand)
3465 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3466 && reg_renumber[REGNO (operand)] < 0
3467 && ((reg_equiv_mem (REGNO (operand)) != 0
3468 && (constraint_satisfied_p
3469 (reg_equiv_mem (REGNO (operand)),
3470 cn)))
3471 || (reg_equiv_address (REGNO (operand))
3472 != 0)))
3473 win = 1;
3475 /* If we didn't already win, we can reload
3476 constants via force_const_mem, and other
3477 MEMs by reloading the address like for 'o'. */
3478 if (CONST_POOL_OK_P (operand_mode[i], operand)
3479 || MEM_P (operand))
3480 badop = 0;
3481 constmemok = 1;
3482 offmemok = 1;
3483 break;
3485 case CT_ADDRESS:
3486 if (constraint_satisfied_p (operand, cn))
3487 win = 1;
3489 /* If we didn't already win, we can reload
3490 the address into a base register. */
3491 this_alternative[i]
3492 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3493 ADDRESS, SCRATCH);
3494 badop = 0;
3495 break;
3497 case CT_FIXED_FORM:
3498 if (constraint_satisfied_p (operand, cn))
3499 win = 1;
3500 break;
3502 break;
3504 reg:
3505 this_alternative[i]
3506 = reg_class_subunion[this_alternative[i]][cl];
3507 if (GET_MODE (operand) == BLKmode)
3508 break;
3509 winreg = 1;
3510 if (REG_P (operand)
3511 && reg_fits_class_p (operand, this_alternative[i],
3512 offset, GET_MODE (recog_data.operand[i])))
3513 win = 1;
3514 break;
3516 while ((p += len), c);
3518 if (swapped == (commutative >= 0 ? 1 : 0))
3519 constraints[i] = p;
3521 /* If this operand could be handled with a reg,
3522 and some reg is allowed, then this operand can be handled. */
3523 if (winreg && this_alternative[i] != NO_REGS
3524 && (win || !class_only_fixed_regs[this_alternative[i]]))
3525 badop = 0;
3527 /* Record which operands fit this alternative. */
3528 this_alternative_earlyclobber[i] = earlyclobber;
3529 if (win && ! force_reload)
3530 this_alternative_win[i] = 1;
3531 else if (did_match && ! force_reload)
3532 this_alternative_match_win[i] = 1;
3533 else
3535 int const_to_mem = 0;
3537 this_alternative_offmemok[i] = offmemok;
3538 losers++;
3539 if (badop)
3540 bad = 1;
3541 /* Alternative loses if it has no regs for a reg operand. */
3542 if (REG_P (operand)
3543 && this_alternative[i] == NO_REGS
3544 && this_alternative_matches[i] < 0)
3545 bad = 1;
3547 /* If this is a constant that is reloaded into the desired
3548 class by copying it to memory first, count that as another
3549 reload. This is consistent with other code and is
3550 required to avoid choosing another alternative when
3551 the constant is moved into memory by this function on
3552 an early reload pass. Note that the test here is
3553 precisely the same as in the code below that calls
3554 force_const_mem. */
3555 if (CONST_POOL_OK_P (operand_mode[i], operand)
3556 && ((targetm.preferred_reload_class (operand,
3557 this_alternative[i])
3558 == NO_REGS)
3559 || no_input_reloads))
3561 const_to_mem = 1;
3562 if (this_alternative[i] != NO_REGS)
3563 losers++;
3566 /* Alternative loses if it requires a type of reload not
3567 permitted for this insn. We can always reload SCRATCH
3568 and objects with a REG_UNUSED note. */
3569 if (GET_CODE (operand) != SCRATCH
3570 && modified[i] != RELOAD_READ && no_output_reloads
3571 && ! find_reg_note (insn, REG_UNUSED, operand))
3572 bad = 1;
3573 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3574 && ! const_to_mem)
3575 bad = 1;
3577 /* If we can't reload this value at all, reject this
3578 alternative. Note that we could also lose due to
3579 LIMIT_RELOAD_CLASS, but we don't check that
3580 here. */
3582 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3584 if (targetm.preferred_reload_class (operand,
3585 this_alternative[i])
3586 == NO_REGS)
3587 reject = 600;
3589 if (operand_type[i] == RELOAD_FOR_OUTPUT
3590 && (targetm.preferred_output_reload_class (operand,
3591 this_alternative[i])
3592 == NO_REGS))
3593 reject = 600;
3596 /* We prefer to reload pseudos over reloading other things,
3597 since such reloads may be eliminated later.
3598 If we are reloading a SCRATCH, we won't be generating any
3599 insns, just using a register, so it is also preferred.
3600 So bump REJECT in other cases. Don't do this in the
3601 case where we are forcing a constant into memory and
3602 it will then win, since we don't want a different
3603 alternative to match in that case. */
3604 if (! (REG_P (operand)
3605 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3606 && GET_CODE (operand) != SCRATCH
3607 && ! (const_to_mem && constmemok))
3608 reject += 2;
3610 /* Input reloads can be inherited more often than output
3611 reloads can be removed, so penalize output reloads. */
3612 if (operand_type[i] != RELOAD_FOR_INPUT
3613 && GET_CODE (operand) != SCRATCH)
3614 reject++;
3617 /* If this operand is a pseudo register that didn't get
3618 a hard reg and this alternative accepts some
3619 register, see if the class that we want is a subset
3620 of the preferred class for this register. If not,
3621 but it intersects that class, use the preferred class
3622 instead. If it does not intersect the preferred
3623 class, show that usage of this alternative should be
3624 discouraged; it will be discouraged more still if the
3625 register is `preferred or nothing'. We do this
3626 because it increases the chance of reusing our spill
3627 register in a later insn and avoiding a pair of
3628 memory stores and loads.
3630 Don't bother with this if this alternative will
3631 accept this operand.
3633 Don't do this for a multiword operand, since it is
3634 only a small win and has the risk of requiring more
3635 spill registers, which could cause a large loss.
3637 Don't do this if the preferred class has only one
3638 register because we might otherwise exhaust the
3639 class. */
3641 if (! win && ! did_match
3642 && this_alternative[i] != NO_REGS
3643 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3644 && reg_class_size [(int) preferred_class[i]] > 0
3645 && ! small_register_class_p (preferred_class[i]))
3647 if (! reg_class_subset_p (this_alternative[i],
3648 preferred_class[i]))
3650 /* Since we don't have a way of forming the intersection,
3651 we just do something special if the preferred class
3652 is a subset of the class we have; that's the most
3653 common case anyway. */
3654 if (reg_class_subset_p (preferred_class[i],
3655 this_alternative[i]))
3656 this_alternative[i] = preferred_class[i];
3657 else
3658 reject += (2 + 2 * pref_or_nothing[i]);
3663 /* Now see if any output operands that are marked "earlyclobber"
3664 in this alternative conflict with any input operands
3665 or any memory addresses. */
3667 for (i = 0; i < noperands; i++)
3668 if (this_alternative_earlyclobber[i]
3669 && (this_alternative_win[i] || this_alternative_match_win[i]))
3671 struct decomposition early_data;
3673 early_data = decompose (recog_data.operand[i]);
3675 gcc_assert (modified[i] != RELOAD_READ);
3677 if (this_alternative[i] == NO_REGS)
3679 this_alternative_earlyclobber[i] = 0;
3680 gcc_assert (this_insn_is_asm);
3681 error_for_asm (this_insn,
3682 "%<&%> constraint used with no register class");
3685 for (j = 0; j < noperands; j++)
3686 /* Is this an input operand or a memory ref? */
3687 if ((MEM_P (recog_data.operand[j])
3688 || modified[j] != RELOAD_WRITE)
3689 && j != i
3690 /* Ignore things like match_operator operands. */
3691 && !recog_data.is_operator[j]
3692 /* Don't count an input operand that is constrained to match
3693 the early clobber operand. */
3694 && ! (this_alternative_matches[j] == i
3695 && rtx_equal_p (recog_data.operand[i],
3696 recog_data.operand[j]))
3697 /* Is it altered by storing the earlyclobber operand? */
3698 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3699 early_data))
3701 /* If the output is in a non-empty few-regs class,
3702 it's costly to reload it, so reload the input instead. */
3703 if (small_register_class_p (this_alternative[i])
3704 && (REG_P (recog_data.operand[j])
3705 || GET_CODE (recog_data.operand[j]) == SUBREG))
3707 losers++;
3708 this_alternative_win[j] = 0;
3709 this_alternative_match_win[j] = 0;
3711 else
3712 break;
3714 /* If an earlyclobber operand conflicts with something,
3715 it must be reloaded, so request this and count the cost. */
3716 if (j != noperands)
3718 losers++;
3719 this_alternative_win[i] = 0;
3720 this_alternative_match_win[j] = 0;
3721 for (j = 0; j < noperands; j++)
3722 if (this_alternative_matches[j] == i
3723 && this_alternative_match_win[j])
3725 this_alternative_win[j] = 0;
3726 this_alternative_match_win[j] = 0;
3727 losers++;
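/* Editor's example (illustrative only): with operand 0 constrained
   "=&r" and operand 1 constrained "r", operand 0 is stored before the
   insn is finished reading operand 1, so operand 1 must not end up in
   the register chosen for operand 0 unless it is constrained to match
   it; when the loop above detects such a conflict it charges one of
   the two operands with an extra reload.  */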
3732 /* If one alternative accepts all the operands with no reloads required,
3733 choose that alternative; don't consider the remaining ones. */
3734 if (losers == 0)
3736 /* Unswap these so that they are never swapped at `finish'. */
3737 if (swapped)
3739 recog_data.operand[commutative] = substed_operand[commutative];
3740 recog_data.operand[commutative + 1]
3741 = substed_operand[commutative + 1];
3743 for (i = 0; i < noperands; i++)
3745 goal_alternative_win[i] = this_alternative_win[i];
3746 goal_alternative_match_win[i] = this_alternative_match_win[i];
3747 goal_alternative[i] = this_alternative[i];
3748 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3749 goal_alternative_matches[i] = this_alternative_matches[i];
3750 goal_alternative_earlyclobber[i]
3751 = this_alternative_earlyclobber[i];
3753 goal_alternative_number = this_alternative_number;
3754 goal_alternative_swapped = swapped;
3755 goal_earlyclobber = this_earlyclobber;
3756 goto finish;
3759 /* REJECT, set by the ! and ? constraint characters and when a register
3760 would be reloaded into a non-preferred class, discourages the use of
3761 this alternative for a reload goal. REJECT is incremented by six
3762 for each ? and two for each non-preferred class. */
3763 losers = losers * 6 + reject;
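/* Editor's worked example (not part of the original source): with the
   weighting above, an alternative needing one reload and carrying one
   '?' scores 1 * 6 + 6 = 12, the same as an alternative needing two
   reloads and no '?'; an alternative containing '!' has REJECT set to
   600, so it is effectively used only as a last resort.  */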
3765 /* If this alternative can be made to work by reloading,
3766 and it needs less reloading than the others checked so far,
3767 record it as the chosen goal for reloading. */
3768 if (! bad)
3770 if (best > losers)
3772 for (i = 0; i < noperands; i++)
3774 goal_alternative[i] = this_alternative[i];
3775 goal_alternative_win[i] = this_alternative_win[i];
3776 goal_alternative_match_win[i]
3777 = this_alternative_match_win[i];
3778 goal_alternative_offmemok[i]
3779 = this_alternative_offmemok[i];
3780 goal_alternative_matches[i] = this_alternative_matches[i];
3781 goal_alternative_earlyclobber[i]
3782 = this_alternative_earlyclobber[i];
3784 goal_alternative_swapped = swapped;
3785 best = losers;
3786 goal_alternative_number = this_alternative_number;
3787 goal_earlyclobber = this_earlyclobber;
3791 if (swapped)
3793 enum reg_class tclass;
3794 int t;
3796 /* If the commutative operands have been swapped, swap
3797 them back in order to check the next alternative. */
3798 recog_data.operand[commutative] = substed_operand[commutative];
3799 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3800 /* Unswap the duplicates too. */
3801 for (i = 0; i < recog_data.n_dups; i++)
3802 if (recog_data.dup_num[i] == commutative
3803 || recog_data.dup_num[i] == commutative + 1)
3804 *recog_data.dup_loc[i]
3805 = recog_data.operand[(int) recog_data.dup_num[i]];
3807 /* Unswap the operand related information as well. */
3808 tclass = preferred_class[commutative];
3809 preferred_class[commutative] = preferred_class[commutative + 1];
3810 preferred_class[commutative + 1] = tclass;
3812 t = pref_or_nothing[commutative];
3813 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3814 pref_or_nothing[commutative + 1] = t;
3816 t = address_reloaded[commutative];
3817 address_reloaded[commutative] = address_reloaded[commutative + 1];
3818 address_reloaded[commutative + 1] = t;
3823 /* The operands don't meet the constraints.
3824 goal_alternative describes the alternative
3825 that we could reach by reloading the fewest operands.
3826 Reload so as to fit it. */
3828 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3830 /* No alternative works with reloads?? */
3831 if (insn_code_number >= 0)
3832 fatal_insn ("unable to generate reloads for:", insn);
3833 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3834 /* Avoid further trouble with this insn. */
3835 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3836 n_reloads = 0;
3837 return 0;
3840 /* Jump to `finish' from above if all operands are valid already.
3841 In that case, goal_alternative_win is all 1. */
3842 finish:
3844 /* Right now, for any pair of operands I and J that are required to match,
3845 with I < J,
3846 goal_alternative_matches[J] is I.
3847 Set up goal_alternative_matched as the inverse function:
3848 goal_alternative_matched[I] = J. */
3850 for (i = 0; i < noperands; i++)
3851 goal_alternative_matched[i] = -1;
3853 for (i = 0; i < noperands; i++)
3854 if (! goal_alternative_win[i]
3855 && goal_alternative_matches[i] >= 0)
3856 goal_alternative_matched[goal_alternative_matches[i]] = i;
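/* Editor's example (illustrative): for an insn whose operand 1 uses
   the matching constraint "0", goal_alternative_matches[1] is 0, and
   when operand 1 still needs a reload the loop above records the
   inverse mapping goal_alternative_matched[0] = 1.  */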
3858 for (i = 0; i < noperands; i++)
3859 goal_alternative_win[i] |= goal_alternative_match_win[i];
3861 /* If the best alternative is with operands 1 and 2 swapped,
3862 consider them swapped before reporting the reloads. Update the
3863 operand numbers of any reloads already pushed. */
3865 if (goal_alternative_swapped)
3867 rtx tem;
3869 tem = substed_operand[commutative];
3870 substed_operand[commutative] = substed_operand[commutative + 1];
3871 substed_operand[commutative + 1] = tem;
3872 tem = recog_data.operand[commutative];
3873 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3874 recog_data.operand[commutative + 1] = tem;
3875 tem = *recog_data.operand_loc[commutative];
3876 *recog_data.operand_loc[commutative]
3877 = *recog_data.operand_loc[commutative + 1];
3878 *recog_data.operand_loc[commutative + 1] = tem;
3880 for (i = 0; i < n_reloads; i++)
3882 if (rld[i].opnum == commutative)
3883 rld[i].opnum = commutative + 1;
3884 else if (rld[i].opnum == commutative + 1)
3885 rld[i].opnum = commutative;
3889 for (i = 0; i < noperands; i++)
3891 operand_reloadnum[i] = -1;
3893 /* If this is an earlyclobber operand, we need to widen the scope.
3894 The reload must remain valid from the start of the insn being
3895 reloaded until after the operand is stored into its destination.
3896 We approximate this with RELOAD_OTHER even though we know that we
3897 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3899 One special case that is worth checking is when we have an
3900 output that is earlyclobber but isn't used past the insn (typically
3901 a SCRATCH). In this case, we need only have the reload live
3902 through the insn itself, not for any of our input or output
3903 reloads.
3904 But we must not accidentally narrow the scope of an existing
3905 RELOAD_OTHER reload - leave these alone.
3907 In any case, anything needed to address this operand can remain
3908 however it was previously categorized. */
3910 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3911 operand_type[i]
3912 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3913 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3916 /* Any constants that aren't allowed and can't be reloaded
3917 into registers are here changed into memory references. */
3918 for (i = 0; i < noperands; i++)
3919 if (! goal_alternative_win[i])
3921 rtx op = recog_data.operand[i];
3922 rtx subreg = NULL_RTX;
3923 rtx plus = NULL_RTX;
3924 enum machine_mode mode = operand_mode[i];
3926 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3927 push_reload so we have to let them pass here. */
3928 if (GET_CODE (op) == SUBREG)
3930 subreg = op;
3931 op = SUBREG_REG (op);
3932 mode = GET_MODE (op);
3935 if (GET_CODE (op) == PLUS)
3937 plus = op;
3938 op = XEXP (op, 1);
3941 if (CONST_POOL_OK_P (mode, op)
3942 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3943 == NO_REGS)
3944 || no_input_reloads))
3946 int this_address_reloaded;
3947 rtx tem = force_const_mem (mode, op);
3949 /* If we stripped a SUBREG or a PLUS above add it back. */
3950 if (plus != NULL_RTX)
3951 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3953 if (subreg != NULL_RTX)
3954 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3956 this_address_reloaded = 0;
3957 substed_operand[i] = recog_data.operand[i]
3958 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3959 0, insn, &this_address_reloaded);
3961 /* If the alternative accepts constant pool refs directly
3962 there will be no reload needed at all. */
3963 if (plus == NULL_RTX
3964 && subreg == NULL_RTX
3965 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3966 ? substed_operand[i]
3967 : NULL,
3968 recog_data.constraints[i],
3969 goal_alternative_number))
3970 goal_alternative_win[i] = 1;
3974 /* Record the values of the earlyclobber operands for the caller. */
3975 if (goal_earlyclobber)
3976 for (i = 0; i < noperands; i++)
3977 if (goal_alternative_earlyclobber[i])
3978 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3980 /* Now record reloads for all the operands that need them. */
3981 for (i = 0; i < noperands; i++)
3982 if (! goal_alternative_win[i])
3984 /* Operands that match previous ones have already been handled. */
3985 if (goal_alternative_matches[i] >= 0)
3987 /* Handle an operand with a nonoffsettable address
3988 appearing where an offsettable address will do
3989 by reloading the address into a base register.
3991 ??? We can also do this when the operand is a register and
3992 reg_equiv_mem is not offsettable, but this is a bit tricky,
3993 so we don't bother with it. It may not be worth doing. */
3994 else if (goal_alternative_matched[i] == -1
3995 && goal_alternative_offmemok[i]
3996 && MEM_P (recog_data.operand[i]))
3998 /* If the address to be reloaded is a VOIDmode constant,
3999 use the default address mode as mode of the reload register,
4000 as would have been done by find_reloads_address. */
4001 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4002 enum machine_mode address_mode;
4004 address_mode = get_address_mode (recog_data.operand[i]);
4005 operand_reloadnum[i]
4006 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4007 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4008 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4009 address_mode,
4010 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4011 rld[operand_reloadnum[i]].inc
4012 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4014 /* If this operand is an output, we will have made any
4015 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4016 now we are treating part of the operand as an input, so
4017 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4019 if (modified[i] == RELOAD_WRITE)
4021 for (j = 0; j < n_reloads; j++)
4023 if (rld[j].opnum == i)
4025 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4026 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4027 else if (rld[j].when_needed
4028 == RELOAD_FOR_OUTADDR_ADDRESS)
4029 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4034 else if (goal_alternative_matched[i] == -1)
4036 operand_reloadnum[i]
4037 = push_reload ((modified[i] != RELOAD_WRITE
4038 ? recog_data.operand[i] : 0),
4039 (modified[i] != RELOAD_READ
4040 ? recog_data.operand[i] : 0),
4041 (modified[i] != RELOAD_WRITE
4042 ? recog_data.operand_loc[i] : 0),
4043 (modified[i] != RELOAD_READ
4044 ? recog_data.operand_loc[i] : 0),
4045 (enum reg_class) goal_alternative[i],
4046 (modified[i] == RELOAD_WRITE
4047 ? VOIDmode : operand_mode[i]),
4048 (modified[i] == RELOAD_READ
4049 ? VOIDmode : operand_mode[i]),
4050 (insn_code_number < 0 ? 0
4051 : insn_data[insn_code_number].operand[i].strict_low),
4052 0, i, operand_type[i]);
4054 /* In a matching pair of operands, one must be input only
4055 and the other must be output only.
4056 Pass the input operand as IN and the other as OUT. */
4057 else if (modified[i] == RELOAD_READ
4058 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4060 operand_reloadnum[i]
4061 = push_reload (recog_data.operand[i],
4062 recog_data.operand[goal_alternative_matched[i]],
4063 recog_data.operand_loc[i],
4064 recog_data.operand_loc[goal_alternative_matched[i]],
4065 (enum reg_class) goal_alternative[i],
4066 operand_mode[i],
4067 operand_mode[goal_alternative_matched[i]],
4068 0, 0, i, RELOAD_OTHER);
4069 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4071 else if (modified[i] == RELOAD_WRITE
4072 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4074 operand_reloadnum[goal_alternative_matched[i]]
4075 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4076 recog_data.operand[i],
4077 recog_data.operand_loc[goal_alternative_matched[i]],
4078 recog_data.operand_loc[i],
4079 (enum reg_class) goal_alternative[i],
4080 operand_mode[goal_alternative_matched[i]],
4081 operand_mode[i],
4082 0, 0, i, RELOAD_OTHER);
4083 operand_reloadnum[i] = output_reloadnum;
4085 else
4087 gcc_assert (insn_code_number < 0);
4088 error_for_asm (insn, "inconsistent operand constraints "
4089 "in an %<asm%>");
4090 /* Avoid further trouble with this insn. */
4091 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4092 n_reloads = 0;
4093 return 0;
4096 else if (goal_alternative_matched[i] < 0
4097 && goal_alternative_matches[i] < 0
4098 && address_operand_reloaded[i] != 1
4099 && optimize)
4101 /* For each non-matching operand that's a MEM or a pseudo-register
4102 that didn't get a hard register, make an optional reload.
4103 This may get done even if the insn needs no reloads otherwise. */
4105 rtx operand = recog_data.operand[i];
4107 while (GET_CODE (operand) == SUBREG)
4108 operand = SUBREG_REG (operand);
4109 if ((MEM_P (operand)
4110 || (REG_P (operand)
4111 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4112 /* If this is only for an output, the optional reload would not
4113 actually cause us to use a register now, just note that
4114 something is stored here. */
4115 && (goal_alternative[i] != NO_REGS
4116 || modified[i] == RELOAD_WRITE)
4117 && ! no_input_reloads
4118 /* An optional output reload might allow us to delete INSN later.
4119 We mustn't make in-out reloads on insns that are not permitted
4120 to have output reloads.
4121 If this is an asm, we can't delete it; we must not even call
4122 push_reload for an optional output reload in this case,
4123 because we can't be sure that the constraint allows a register,
4124 and push_reload verifies the constraints for asms. */
4125 && (modified[i] == RELOAD_READ
4126 || (! no_output_reloads && ! this_insn_is_asm)))
4127 operand_reloadnum[i]
4128 = push_reload ((modified[i] != RELOAD_WRITE
4129 ? recog_data.operand[i] : 0),
4130 (modified[i] != RELOAD_READ
4131 ? recog_data.operand[i] : 0),
4132 (modified[i] != RELOAD_WRITE
4133 ? recog_data.operand_loc[i] : 0),
4134 (modified[i] != RELOAD_READ
4135 ? recog_data.operand_loc[i] : 0),
4136 (enum reg_class) goal_alternative[i],
4137 (modified[i] == RELOAD_WRITE
4138 ? VOIDmode : operand_mode[i]),
4139 (modified[i] == RELOAD_READ
4140 ? VOIDmode : operand_mode[i]),
4141 (insn_code_number < 0 ? 0
4142 : insn_data[insn_code_number].operand[i].strict_low),
4143 1, i, operand_type[i]);
4144 /* If a memory reference remains (either as a MEM or a pseudo that
4145 did not get a hard register), yet we can't make an optional
4146 reload, check if this is actually a pseudo register reference;
4147 we then need to emit a USE and/or a CLOBBER so that reload
4148 inheritance will do the right thing. */
4149 else if (replace
4150 && (MEM_P (operand)
4151 || (REG_P (operand)
4152 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4153 && reg_renumber [REGNO (operand)] < 0)))
4155 operand = *recog_data.operand_loc[i];
4157 while (GET_CODE (operand) == SUBREG)
4158 operand = SUBREG_REG (operand);
4159 if (REG_P (operand))
4161 if (modified[i] != RELOAD_WRITE)
4162 /* We mark the USE with QImode so that we recognize
4163 it as one that can be safely deleted at the end
4164 of reload. */
4165 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4166 insn), QImode);
4167 if (modified[i] != RELOAD_READ)
4168 emit_insn_after (gen_clobber (operand), insn);
4172 else if (goal_alternative_matches[i] >= 0
4173 && goal_alternative_win[goal_alternative_matches[i]]
4174 && modified[i] == RELOAD_READ
4175 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4176 && ! no_input_reloads && ! no_output_reloads
4177 && optimize)
4179 /* Similarly, make an optional reload for a pair of matching
4180 objects that are in MEM or a pseudo that didn't get a hard reg. */
4182 rtx operand = recog_data.operand[i];
4184 while (GET_CODE (operand) == SUBREG)
4185 operand = SUBREG_REG (operand);
4186 if ((MEM_P (operand)
4187 || (REG_P (operand)
4188 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4189 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4190 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4191 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4192 recog_data.operand[i],
4193 recog_data.operand_loc[goal_alternative_matches[i]],
4194 recog_data.operand_loc[i],
4195 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4196 operand_mode[goal_alternative_matches[i]],
4197 operand_mode[i],
4198 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4201 /* Perform whatever substitutions on the operands we are supposed
4202 to make due to commutativity or replacement of registers
4203 with equivalent constants or memory slots. */
4205 for (i = 0; i < noperands; i++)
4207 /* We only do this on the last pass through reload, because it is
4208 possible for some data (like reg_equiv_address) to be changed during
4209 later passes. Moreover, we lose the opportunity to get a useful
4210 reload_{in,out}_reg when we do these replacements. */
4212 if (replace)
4214 rtx substitution = substed_operand[i];
4216 *recog_data.operand_loc[i] = substitution;
4218 /* If we're replacing an operand with a LABEL_REF, we need to
4219 make sure that there's a REG_LABEL_OPERAND note attached to
4220 this instruction. */
4221 if (GET_CODE (substitution) == LABEL_REF
4222 && !find_reg_note (insn, REG_LABEL_OPERAND,
4223 XEXP (substitution, 0))
4224 /* For a JUMP_P, if it was a branch target it must have
4225 already been recorded as such. */
4226 && (!JUMP_P (insn)
4227 || !label_is_jump_target_p (XEXP (substitution, 0),
4228 insn)))
4230 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4231 if (LABEL_P (XEXP (substitution, 0)))
4232 ++LABEL_NUSES (XEXP (substitution, 0));
4236 else
4237 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4240 /* If this insn pattern contains any MATCH_DUP's, make sure that
4241 they will be substituted if the operands they match are substituted.
4242 Also do now any substitutions we already did on the operands.
4244 Don't do this if we aren't making replacements because we might be
4245 propagating things allocated by frame pointer elimination into places
4246 it doesn't expect. */
4248 if (insn_code_number >= 0 && replace)
4249 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4251 int opno = recog_data.dup_num[i];
4252 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4253 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4256 #if 0
4257 /* This loses because reloading of prior insns can invalidate the equivalence
4258 (or at least find_equiv_reg isn't smart enough to find it any more),
4259 causing this insn to need more reload regs than it needed before.
4260 It may be too late to make the reload regs available.
4261 Now this optimization is done safely in choose_reload_regs. */
4263 /* For each reload of a reg into some other class of reg,
4264 search for an existing equivalent reg (same value now) in the right class.
4265 We can use it as long as we don't need to change its contents. */
4266 for (i = 0; i < n_reloads; i++)
4267 if (rld[i].reg_rtx == 0
4268 && rld[i].in != 0
4269 && REG_P (rld[i].in)
4270 && rld[i].out == 0)
4272 rld[i].reg_rtx
4273 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4274 static_reload_reg_p, 0, rld[i].inmode);
4275 /* Prevent generation of insn to load the value
4276 because the one we found already has the value. */
4277 if (rld[i].reg_rtx)
4278 rld[i].in = rld[i].reg_rtx;
4280 #endif
4282 /* If we detected an error and replaced the asm instruction by a USE, forget
4283 about the reloads. */
4284 if (GET_CODE (PATTERN (insn)) == USE
4285 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4286 n_reloads = 0;
4288 /* Perhaps an output reload can be combined with another
4289 to reduce needs by one. */
4290 if (!goal_earlyclobber)
4291 combine_reloads ();
4293 /* If we have a pair of reloads for parts of an address, they are reloading
4294 the same object, the operands themselves were not reloaded, and they
4295 are for two operands that are supposed to match, merge the reloads and
4296 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4298 for (i = 0; i < n_reloads; i++)
4300 int k;
4302 for (j = i + 1; j < n_reloads; j++)
4303 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4304 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4305 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4306 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4307 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4308 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4309 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4310 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4311 && rtx_equal_p (rld[i].in, rld[j].in)
4312 && (operand_reloadnum[rld[i].opnum] < 0
4313 || rld[operand_reloadnum[rld[i].opnum]].optional)
4314 && (operand_reloadnum[rld[j].opnum] < 0
4315 || rld[operand_reloadnum[rld[j].opnum]].optional)
4316 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4317 || (goal_alternative_matches[rld[j].opnum]
4318 == rld[i].opnum)))
4320 for (k = 0; k < n_replacements; k++)
4321 if (replacements[k].what == j)
4322 replacements[k].what = i;
4324 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4325 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4326 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4327 else
4328 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4329 rld[j].in = 0;
4333 /* Scan all the reloads and update their type.
4334 If a reload is for the address of an operand and we didn't reload
4335 that operand, change the type. Similarly, change the operand number
4336 of a reload when two operands match. If a reload is optional, treat it
4337 as though the operand isn't reloaded.
4339 ??? This latter case is somewhat odd because if we do the optional
4340 reload, it means the object is hanging around. Thus we need only
4341 do the address reload if the optional reload was NOT done.
4343 Change secondary reloads to be the address type of their operand, not
4344 the normal type.
4346 If an operand's reload is now RELOAD_OTHER, change any
4347 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4348 RELOAD_FOR_OTHER_ADDRESS. */
4350 for (i = 0; i < n_reloads; i++)
4352 if (rld[i].secondary_p
4353 && rld[i].when_needed == operand_type[rld[i].opnum])
4354 rld[i].when_needed = address_type[rld[i].opnum];
4356 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4357 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4358 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4359 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4360 && (operand_reloadnum[rld[i].opnum] < 0
4361 || rld[operand_reloadnum[rld[i].opnum]].optional))
4363 /* If we have a secondary reload to go along with this reload,
4364 change its type to RELOAD_FOR_OPADDR_ADDR. */
4366 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4367 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4368 && rld[i].secondary_in_reload != -1)
4370 int secondary_in_reload = rld[i].secondary_in_reload;
4372 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4374 /* If there's a tertiary reload we have to change it also. */
4375 if (secondary_in_reload > 0
4376 && rld[secondary_in_reload].secondary_in_reload != -1)
4377 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4378 = RELOAD_FOR_OPADDR_ADDR;
4381 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4382 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4383 && rld[i].secondary_out_reload != -1)
4385 int secondary_out_reload = rld[i].secondary_out_reload;
4387 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4389 /* If there's a tertiary reload we have to change it also. */
4390 if (secondary_out_reload
4391 && rld[secondary_out_reload].secondary_out_reload != -1)
4392 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4393 = RELOAD_FOR_OPADDR_ADDR;
4396 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4397 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4398 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4399 else
4400 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4403 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4404 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4405 && operand_reloadnum[rld[i].opnum] >= 0
4406 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4407 == RELOAD_OTHER))
4408 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4410 if (goal_alternative_matches[rld[i].opnum] >= 0)
4411 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4414 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4415 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4416 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4418 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4419 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4420 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4421 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4422 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4423 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4424 This is complicated by the fact that a single operand can have more
4425 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4426 choose_reload_regs without affecting code quality, and cases that
4427 actually fail are extremely rare, so it turns out to be better to fix
4428 the problem here by not generating cases that choose_reload_regs will
4429 fail for. */
4430 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4431 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4432 a single operand.
4433 We can reduce the register pressure by exploiting that a
4434 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4435 does not conflict with any of them, if it is only used for the first of
4436 the RELOAD_FOR_X_ADDRESS reloads. */
4438 int first_op_addr_num = -2;
4439 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4440 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4441 int need_change = 0;
4442 /* We use first_op_addr_num and the contents of the above arrays
4443 first as flags: -2 means no instance encountered, -1 means exactly
4444 one instance encountered.
4445 If more than one instance has been encountered, we store the reload
4446 number of the first reload of the kind in question; reload numbers
4447 are known to be non-negative. */
4448 for (i = 0; i < noperands; i++)
4449 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
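/* Editor's note (illustrative): each counter starts at -2; the first
   matching reload bumps it to -1 ("exactly one seen"), and any further
   match makes the increment reach zero or more, at which point the
   counter is overwritten with the current reload number and
   NEED_CHANGE is set.  Because the loop below walks from the last
   reload down to the first, the counter ends up holding the
   lowest-numbered reload of its kind.  */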
4450 for (i = n_reloads - 1; i >= 0; i--)
4452 switch (rld[i].when_needed)
4454 case RELOAD_FOR_OPERAND_ADDRESS:
4455 if (++first_op_addr_num >= 0)
4457 first_op_addr_num = i;
4458 need_change = 1;
4460 break;
4461 case RELOAD_FOR_INPUT_ADDRESS:
4462 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4464 first_inpaddr_num[rld[i].opnum] = i;
4465 need_change = 1;
4467 break;
4468 case RELOAD_FOR_OUTPUT_ADDRESS:
4469 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4471 first_outpaddr_num[rld[i].opnum] = i;
4472 need_change = 1;
4474 break;
4475 default:
4476 break;
4480 if (need_change)
4482 for (i = 0; i < n_reloads; i++)
4484 int first_num;
4485 enum reload_type type;
4487 switch (rld[i].when_needed)
4489 case RELOAD_FOR_OPADDR_ADDR:
4490 first_num = first_op_addr_num;
4491 type = RELOAD_FOR_OPERAND_ADDRESS;
4492 break;
4493 case RELOAD_FOR_INPADDR_ADDRESS:
4494 first_num = first_inpaddr_num[rld[i].opnum];
4495 type = RELOAD_FOR_INPUT_ADDRESS;
4496 break;
4497 case RELOAD_FOR_OUTADDR_ADDRESS:
4498 first_num = first_outpaddr_num[rld[i].opnum];
4499 type = RELOAD_FOR_OUTPUT_ADDRESS;
4500 break;
4501 default:
4502 continue;
4504 if (first_num < 0)
4505 continue;
4506 else if (i > first_num)
4507 rld[i].when_needed = type;
4508 else
4510 /* Check if the only TYPE reload that uses reload I is
4511 reload FIRST_NUM. */
4512 for (j = n_reloads - 1; j > first_num; j--)
4514 if (rld[j].when_needed == type
4515 && (rld[i].secondary_p
4516 ? rld[j].secondary_in_reload == i
4517 : reg_mentioned_p (rld[i].in, rld[j].in)))
4519 rld[i].when_needed = type;
4520 break;
4528 /* See if we have any reloads that are now allowed to be merged
4529 because we've changed when the reload is needed to
4530 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4531 check for the most common cases. */
4533 for (i = 0; i < n_reloads; i++)
4534 if (rld[i].in != 0 && rld[i].out == 0
4535 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4536 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4537 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4538 for (j = 0; j < n_reloads; j++)
4539 if (i != j && rld[j].in != 0 && rld[j].out == 0
4540 && rld[j].when_needed == rld[i].when_needed
4541 && MATCHES (rld[i].in, rld[j].in)
4542 && rld[i].rclass == rld[j].rclass
4543 && !rld[i].nocombine && !rld[j].nocombine
4544 && rld[i].reg_rtx == rld[j].reg_rtx)
4546 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4547 transfer_replacements (i, j);
4548 rld[j].in = 0;
4551 #ifdef HAVE_cc0
4552 /* If we made any reloads for addresses, see if they violate a
4553 "no input reloads" requirement for this insn. But loads that we
4554 do after the insn (such as for output addresses) are fine. */
4555 if (no_input_reloads)
4556 for (i = 0; i < n_reloads; i++)
4557 gcc_assert (rld[i].in == 0
4558 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4559 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4560 #endif
4562 /* Compute reload_mode and reload_nregs. */
4563 for (i = 0; i < n_reloads; i++)
4565 rld[i].mode
4566 = (rld[i].inmode == VOIDmode
4567 || (GET_MODE_SIZE (rld[i].outmode)
4568 > GET_MODE_SIZE (rld[i].inmode)))
4569 ? rld[i].outmode : rld[i].inmode;
4571 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4574 /* Special case a simple move with an input reload and a
4575 destination of a hard reg: if the hard reg is ok, use it. */
4576 for (i = 0; i < n_reloads; i++)
4577 if (rld[i].when_needed == RELOAD_FOR_INPUT
4578 && GET_CODE (PATTERN (insn)) == SET
4579 && REG_P (SET_DEST (PATTERN (insn)))
4580 && (SET_SRC (PATTERN (insn)) == rld[i].in
4581 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4582 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4584 rtx dest = SET_DEST (PATTERN (insn));
4585 unsigned int regno = REGNO (dest);
4587 if (regno < FIRST_PSEUDO_REGISTER
4588 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4589 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4591 int nr = hard_regno_nregs[regno][rld[i].mode];
4592 int ok = 1, nri;
4594 for (nri = 1; nri < nr; nri ++)
4595 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4597 ok = 0;
4598 break;
4601 if (ok)
4602 rld[i].reg_rtx = dest;
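/* Editor's example (illustrative; the register numbers are made up):
   for (set (reg:SI 1) (mem:SI (reg:SI 101))) where hard register 1
   belongs to the reload's class and is not an elimination target, the
   destination itself is recorded as the reload register, so no
   separate spill register has to be found for the input reload.  */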
4606 return retval;
4609 /* Return true if alternative number ALTNUM in constraint-string
4610 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4611 MEM gives the reference if it didn't need any reloads, otherwise it
4612 is null. */
4614 static bool
4615 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4616 const char *constraint, int altnum)
4618 int c;
4620 /* Skip alternatives before the one requested. */
4621 while (altnum > 0)
4623 while (*constraint++ != ',')
4625 altnum--;
4627 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4628 If one of them is present, this alternative accepts the result of
4629 passing a constant-pool reference through find_reloads_toplev.
4631 The same is true of extra memory constraints if the address
4632 was reloaded into a register. However, the target may elect
4633 to disallow the original constant address, forcing it to be
4634 reloaded into a register instead. */
4635 for (; (c = *constraint) && c != ',' && c != '#';
4636 constraint += CONSTRAINT_LEN (c, constraint))
4638 enum constraint_num cn = lookup_constraint (constraint);
4639 if (insn_extra_memory_constraint (cn)
4640 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4641 return true;
4643 return false;
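/* Editor's example (illustrative): for CONSTRAINT "r,o" and ALTNUM 1,
   the first loop above skips past the comma and the scan then reaches
   'o', a memory constraint, so the function returns true (provided
   MEM, when given, satisfies it): that alternative can accept a
   constant-pool reference produced by find_reloads_toplev.  */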
4646 /* Scan X for memory references and scan the addresses for reloading.
4647 Also checks for references to "constant" regs that we want to eliminate
4648 and replaces them with the values they stand for.
4649 We may alter X destructively if it contains a reference to such.
4650 If X is just a constant reg, we return the equivalent value
4651 instead of X.
4653 IND_LEVELS says how many levels of indirect addressing this machine
4654 supports.
4656 OPNUM and TYPE identify the purpose of the reload.
4658 IS_SET_DEST is true if X is the destination of a SET, which is not
4659 appropriate to be replaced by a constant.
4661 INSN, if nonzero, is the insn in which we do the reload. It is used
4662 to determine if we may generate output reloads, and where to put USEs
4663 for pseudos that we have to replace with stack slots.
4665 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4666 result of find_reloads_address. */
4668 static rtx
4669 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4670 int ind_levels, int is_set_dest, rtx insn,
4671 int *address_reloaded)
4673 RTX_CODE code = GET_CODE (x);
4675 const char *fmt = GET_RTX_FORMAT (code);
4676 int i;
4677 int copied;
4679 if (code == REG)
4681 /* This code is duplicated for speed in find_reloads. */
4682 int regno = REGNO (x);
4683 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4684 x = reg_equiv_constant (regno);
4685 #if 0
4686 /* This creates (subreg (mem...)) which would cause an unnecessary
4687 reload of the mem. */
4688 else if (reg_equiv_mem (regno) != 0)
4689 x = reg_equiv_mem (regno);
4690 #endif
4691 else if (reg_equiv_memory_loc (regno)
4692 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4694 rtx mem = make_memloc (x, regno);
4695 if (reg_equiv_address (regno)
4696 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4698 /* If this is not a toplevel operand, find_reloads doesn't see
4699 this substitution. We have to emit a USE of the pseudo so
4700 that delete_output_reload can see it. */
4701 if (replace_reloads && recog_data.operand[opnum] != x)
4702 /* We mark the USE with QImode so that we recognize it
4703 as one that can be safely deleted at the end of
4704 reload. */
4705 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4706 QImode);
4707 x = mem;
4708 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4709 opnum, type, ind_levels, insn);
4710 if (!rtx_equal_p (x, mem))
4711 push_reg_equiv_alt_mem (regno, x);
4712 if (address_reloaded)
4713 *address_reloaded = i;
4716 return x;
4718 if (code == MEM)
4720 rtx tem = x;
4722 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4723 opnum, type, ind_levels, insn);
4724 if (address_reloaded)
4725 *address_reloaded = i;
4727 return tem;
4730 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4732 /* Check for SUBREG containing a REG that's equivalent to a
4733 constant. If the constant has a known value, truncate it
4734 right now. Similarly if we are extracting a single-word of a
4735 multi-word constant. If the constant is symbolic, allow it
4736 to be substituted normally. push_reload will strip the
4737 subreg later. The constant must not be VOIDmode, because we
4738 will lose the mode of the register (this should never happen
4739 because one of the cases above should handle it). */
4741 int regno = REGNO (SUBREG_REG (x));
4742 rtx tem;
4744 if (regno >= FIRST_PSEUDO_REGISTER
4745 && reg_renumber[regno] < 0
4746 && reg_equiv_constant (regno) != 0)
4748 tem =
4749 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4750 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4751 gcc_assert (tem);
4752 if (CONSTANT_P (tem)
4753 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4755 tem = force_const_mem (GET_MODE (x), tem);
4756 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4757 &XEXP (tem, 0), opnum, type,
4758 ind_levels, insn);
4759 if (address_reloaded)
4760 *address_reloaded = i;
4762 return tem;
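/* Editor's example (illustrative, assuming a little-endian target):
   if (reg:SI 100) is equivalent to (const_int 0x12345678), then
   (subreg:QI (reg:SI 100) 0) is simplified above to (const_int 0x78);
   only if the resulting constant is not legitimate in QImode is it
   forced into the constant pool and its address reloaded.  */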
4765 /* If the subreg contains a reg that will be converted to a mem,
4766 attempt to convert the whole subreg to a (narrower or wider)
4767 memory reference instead. If this succeeds, we're done --
4768 otherwise fall through to check whether the inner reg still
4769 needs address reloads anyway. */
4771 if (regno >= FIRST_PSEUDO_REGISTER
4772 && reg_equiv_memory_loc (regno) != 0)
4774 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4775 insn, address_reloaded);
4776 if (tem)
4777 return tem;
4781 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4783 if (fmt[i] == 'e')
4785 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4786 ind_levels, is_set_dest, insn,
4787 address_reloaded);
4788 /* If we have replaced a reg with its equivalent memory loc -
4789 that can still be handled here e.g. if it's in a paradoxical
4790 subreg - we must make the change in a copy, rather than using
4791 a destructive change. This way, find_reloads can still elect
4792 not to do the change. */
4793 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4795 x = shallow_copy_rtx (x);
4796 copied = 1;
4798 XEXP (x, i) = new_part;
4801 return x;
4804 /* Return a mem ref for the memory equivalent of reg REGNO.
4805 This mem ref is not shared with anything. */
4807 static rtx
4808 make_memloc (rtx ad, int regno)
4810 /* We must rerun eliminate_regs, in case the elimination
4811 offsets have changed. */
4812 rtx tem
4813 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4816 /* If TEM might contain a pseudo, we must copy it to avoid
4817 modifying it when we do the substitution for the reload. */
4818 if (rtx_varies_p (tem, 0))
4819 tem = copy_rtx (tem);
4821 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4822 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4824 /* Copy the result if it's still the same as the equivalence, to avoid
4825 modifying it when we do the substitution for the reload. */
4826 if (tem == reg_equiv_memory_loc (regno))
4827 tem = copy_rtx (tem);
4828 return tem;
4831 /* Returns true if AD could be turned into a valid memory reference
4832 to mode MODE in address space AS by reloading the part pointed to
4833 by PART into a register. */
4835 static int
4836 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4837 addr_space_t as, rtx *part)
4839 int retv;
4840 rtx tem = *part;
4841 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4843 *part = reg;
4844 retv = memory_address_addr_space_p (mode, ad, as);
4845 *part = tem;
4847 return retv;
4850 /* Record all reloads needed for handling memory address AD
4851 which appears in *LOC in a memory reference to mode MODE
4852 which itself is found in location *MEMREFLOC.
4853 Note that we take shortcuts assuming that no multi-reg machine mode
4854 occurs as part of an address.
4856 OPNUM and TYPE specify the purpose of this reload.
4858 IND_LEVELS says how many levels of indirect addressing this machine
4859 supports.
4861 INSN, if nonzero, is the insn in which we do the reload. It is used
4862 to determine if we may generate output reloads, and where to put USEs
4863 for pseudos that we have to replace with stack slots.
4865 Value is one if this address is reloaded or replaced as a whole; it is
4866 zero if the top level of this address was not reloaded or replaced, and
4867 it is -1 if it may or may not have been reloaded or replaced.
4869 Note that there is no verification that the address will be valid after
4870 this routine does its work. Instead, we rely on the fact that the address
4871 was valid when reload started. So we need only undo things that reload
4872 could have broken. These are wrong register types, pseudos not allocated
4873 to a hard register, and frame pointer elimination. */
4875 static int
4876 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4877 rtx *loc, int opnum, enum reload_type type,
4878 int ind_levels, rtx insn)
4880 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4881 : ADDR_SPACE_GENERIC;
4882 int regno;
4883 int removed_and = 0;
4884 int op_index;
4885 rtx tem;
4887 /* If the address is a register, see if it is a legitimate address and
4888 reload if not. We first handle the cases where we need not reload
4889 or where we must reload in a non-standard way. */
4891 if (REG_P (ad))
4893 regno = REGNO (ad);
4895 if (reg_equiv_constant (regno) != 0)
4897 find_reloads_address_part (reg_equiv_constant (regno), loc,
4898 base_reg_class (mode, as, MEM, SCRATCH),
4899 GET_MODE (ad), opnum, type, ind_levels);
4900 return 1;
4903 tem = reg_equiv_memory_loc (regno);
4904 if (tem != 0)
4906 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4908 tem = make_memloc (ad, regno);
4909 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4910 XEXP (tem, 0),
4911 MEM_ADDR_SPACE (tem)))
4913 rtx orig = tem;
4915 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4916 &XEXP (tem, 0), opnum,
4917 ADDR_TYPE (type), ind_levels, insn);
4918 if (!rtx_equal_p (tem, orig))
4919 push_reg_equiv_alt_mem (regno, tem);
4921 /* We can avoid a reload if the register's equivalent memory
4922 expression is valid as an indirect memory address.
4923 But not all addresses are valid in a mem used as an indirect
4924 address: only reg or reg+constant. */
4926 if (ind_levels > 0
4927 && strict_memory_address_addr_space_p (mode, tem, as)
4928 && (REG_P (XEXP (tem, 0))
4929 || (GET_CODE (XEXP (tem, 0)) == PLUS
4930 && REG_P (XEXP (XEXP (tem, 0), 0))
4931 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4933 /* If TEM is not the same as what we'll be replacing the
4934 pseudo with after reload, put a USE in front of INSN
4935 in the final reload pass. */
4936 if (replace_reloads
4937 && num_not_at_initial_offset
4938 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4940 *loc = tem;
4941 /* We mark the USE with QImode so that we
4942 recognize it as one that can be safely
4943 deleted at the end of reload. */
4944 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4945 insn), QImode);
4947 /* This doesn't really count as replacing the address
4948 as a whole, since it is still a memory access. */
4950 return 0;
4952 ad = tem;
4956 /* The only remaining case where we can avoid a reload is if this is a
4957 hard register that is valid as a base register and which is not the
4958 subject of a CLOBBER in this insn. */
4960 else if (regno < FIRST_PSEUDO_REGISTER
4961 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4962 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4963 return 0;
4965 /* If we do not have one of the cases above, we must do the reload. */
4966 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4967 base_reg_class (mode, as, MEM, SCRATCH),
4968 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4969 return 1;
4972 if (strict_memory_address_addr_space_p (mode, ad, as))
4974 /* The address appears valid, so reloads are not needed.
4975 But the address may contain an eliminable register.
4976 This can happen because a machine with indirect addressing
4977 may consider a pseudo register by itself a valid address even when
4978 it has failed to get a hard reg.
4979 So do a tree-walk to find and eliminate all such regs. */
4981 /* But first quickly dispose of a common case. */
4982 if (GET_CODE (ad) == PLUS
4983 && CONST_INT_P (XEXP (ad, 1))
4984 && REG_P (XEXP (ad, 0))
4985 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4986 return 0;
4988 subst_reg_equivs_changed = 0;
4989 *loc = subst_reg_equivs (ad, insn);
4991 if (! subst_reg_equivs_changed)
4992 return 0;
4994 /* Check result for validity after substitution. */
4995 if (strict_memory_address_addr_space_p (mode, ad, as))
4996 return 0;
4999 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5002 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5004 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5005 ind_levels, win);
5007 break;
5008 win:
5009 *memrefloc = copy_rtx (*memrefloc);
5010 XEXP (*memrefloc, 0) = ad;
5011 move_replacements (&ad, &XEXP (*memrefloc, 0));
5012 return -1;
5014 while (0);
5015 #endif
5017 /* The address is not valid. We have to figure out why. First see if
5018 we have an outer AND and remove it if so. Then analyze what's inside. */
5020 if (GET_CODE (ad) == AND)
5022 removed_and = 1;
5023 loc = &XEXP (ad, 0);
5024 ad = *loc;
5027 /* One possibility for why the address is invalid is that it is itself
5028 a MEM. This can happen when the frame pointer is being eliminated, a
5029 pseudo is not allocated to a hard register, and the offset between the
5030 frame and stack pointers is not its initial value. In that case the
5031 pseudo will have been replaced by a MEM referring to the
5032 stack pointer. */
5033 if (MEM_P (ad))
5035 /* First ensure that the address in this MEM is valid. Then, unless
5036 indirect addresses are valid, reload the MEM into a register. */
5037 tem = ad;
5038 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5039 opnum, ADDR_TYPE (type),
5040 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5042 /* If tem was changed, then we must create a new memory reference to
5043 hold it and store it back into memrefloc. */
5044 if (tem != ad && memrefloc)
5046 *memrefloc = copy_rtx (*memrefloc);
5047 copy_replacements (tem, XEXP (*memrefloc, 0));
5048 loc = &XEXP (*memrefloc, 0);
5049 if (removed_and)
5050 loc = &XEXP (*loc, 0);
5053 /* Check cases similar to those for indirect addresses above, except
5054 that we can allow pseudos and a MEM since they should have been
5055 taken care of above. */
5057 if (ind_levels == 0
5058 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5059 || MEM_P (XEXP (tem, 0))
5060 || ! (REG_P (XEXP (tem, 0))
5061 || (GET_CODE (XEXP (tem, 0)) == PLUS
5062 && REG_P (XEXP (XEXP (tem, 0), 0))
5063 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5065 /* Must use TEM here, not AD, since it is the one that will
5066 have any subexpressions reloaded, if needed. */
5067 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5068 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5069 VOIDmode, 0,
5070 0, opnum, type);
5071 return ! removed_and;
5073 else
5074 return 0;
5077 /* If we have address of a stack slot but it's not valid because the
5078 displacement is too large, compute the sum in a register.
5079 Handle all base registers here, not just fp/ap/sp, because on some
5080 targets (namely SH) we can also get too large displacements from
5081 big-endian corrections. */
5082 else if (GET_CODE (ad) == PLUS
5083 && REG_P (XEXP (ad, 0))
5084 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5085 && CONST_INT_P (XEXP (ad, 1))
5086 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5087 CONST_INT)
5088 /* Similarly, if we were to reload the base register and the
5089 mem+offset address is still invalid, then we want to reload
5090 the whole address, not just the base register. */
5091 || ! maybe_memory_address_addr_space_p
5092 (mode, ad, as, &(XEXP (ad, 0)))))
5095 /* Unshare the MEM rtx so we can safely alter it. */
5096 if (memrefloc)
5098 *memrefloc = copy_rtx (*memrefloc);
5099 loc = &XEXP (*memrefloc, 0);
5100 if (removed_and)
5101 loc = &XEXP (*loc, 0);
5104 if (double_reg_address_ok
5105 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5106 PLUS, CONST_INT))
5108 /* Unshare the sum as well. */
5109 *loc = ad = copy_rtx (ad);
5111 /* Reload the displacement into an index reg.
5112 We assume the frame pointer or arg pointer is a base reg. */
5113 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5114 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5115 type, ind_levels);
5116 return 0;
5118 else
5120 /* If the sum of two regs is not necessarily valid,
5121 reload the sum into a base reg.
5122 That will at least work. */
5123 find_reloads_address_part (ad, loc,
5124 base_reg_class (mode, as, MEM, SCRATCH),
5125 GET_MODE (ad), opnum, type, ind_levels);
5127 return ! removed_and;
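/* Illustrative sketch of the case above (hypothetical offset): an
   address such as (plus (reg fp) (const_int 100000)) whose displacement
   is out of range is handled either by reloading the constant into an
   index register, giving (plus (reg fp) (reg <reload>)), when
   double_reg_address_ok permits, or else by reloading the whole sum into
   a single base register.  */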
5130 /* If we have an indexed stack slot, there are three possible reasons why
5131 it might be invalid: The index might need to be reloaded, the address
5132 might have been made by frame pointer elimination and hence have a
5133 constant out of range, or both reasons might apply.
5135 We can easily check for an index needing reload, but even if that is the
5136 case, we might also have an invalid constant. To avoid making the
5137 conservative assumption and requiring two reloads, we see if this address
5138 is valid when not interpreted strictly. If it is, the only problem is
5139 that the index needs a reload and find_reloads_address_1 will take care
5140 of it.
5142 Handle all base registers here, not just fp/ap/sp, because on some
5143 targets (namely SPARC) we can also get invalid addresses from preventive
5144 subreg big-endian corrections made by find_reloads_toplev. We
5145 can also get expressions involving LO_SUM (rather than PLUS) from
5146 find_reloads_subreg_address.
5148 If we decide to do something, it must be that `double_reg_address_ok'
5149 is true. We generate a reload of the base register + constant and
5150 rework the sum so that the reload register will be added to the index.
5151 This is safe because we know the address isn't shared.
5153 We check for the base register as both the first and second operand of
5154 the innermost PLUS and/or LO_SUM. */
5156 for (op_index = 0; op_index < 2; ++op_index)
5158 rtx operand, addend;
5159 enum rtx_code inner_code;
5161 if (GET_CODE (ad) != PLUS)
5162 continue;
5164 inner_code = GET_CODE (XEXP (ad, 0));
5165 if (!(GET_CODE (ad) == PLUS
5166 && CONST_INT_P (XEXP (ad, 1))
5167 && (inner_code == PLUS || inner_code == LO_SUM)))
5168 continue;
5170 operand = XEXP (XEXP (ad, 0), op_index);
5171 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5172 continue;
5174 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5176 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5177 GET_CODE (addend))
5178 || operand == frame_pointer_rtx
5179 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5180 || operand == hard_frame_pointer_rtx
5181 #endif
5182 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5183 || operand == arg_pointer_rtx
5184 #endif
5185 || operand == stack_pointer_rtx)
5186 && ! maybe_memory_address_addr_space_p
5187 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5189 rtx offset_reg;
5190 enum reg_class cls;
5192 offset_reg = plus_constant (GET_MODE (ad), operand,
5193 INTVAL (XEXP (ad, 1)));
5195 /* Form the adjusted address. */
5196 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5197 ad = gen_rtx_PLUS (GET_MODE (ad),
5198 op_index == 0 ? offset_reg : addend,
5199 op_index == 0 ? addend : offset_reg);
5200 else
5201 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5202 op_index == 0 ? offset_reg : addend,
5203 op_index == 0 ? addend : offset_reg);
5204 *loc = ad;
5206 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5207 find_reloads_address_part (XEXP (ad, op_index),
5208 &XEXP (ad, op_index), cls,
5209 GET_MODE (ad), opnum, type, ind_levels);
5210 find_reloads_address_1 (mode, as,
5211 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5212 GET_CODE (XEXP (ad, op_index)),
5213 &XEXP (ad, 1 - op_index), opnum,
5214 type, 0, insn);
5216 return 0;
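/* Illustrative sketch of the rework above (hypothetical registers and
   offset): (plus (plus (reg fp) (reg index)) (const_int 100000)) is
   rewritten as (plus (plus (reg fp) (const_int 100000)) (reg index)),
   and the inner base-plus-constant part is then reloaded into a base
   register, so the reload register ends up added to the index.  */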
5220 /* See if address becomes valid when an eliminable register
5221 in a sum is replaced. */
5223 tem = ad;
5224 if (GET_CODE (ad) == PLUS)
5225 tem = subst_indexed_address (ad);
5226 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5228 /* Ok, we win that way. Replace any additional eliminable
5229 registers. */
5231 subst_reg_equivs_changed = 0;
5232 tem = subst_reg_equivs (tem, insn);
5234 /* Make sure that didn't make the address invalid again. */
5236 if (! subst_reg_equivs_changed
5237 || strict_memory_address_addr_space_p (mode, tem, as))
5239 *loc = tem;
5240 return 0;
5244 /* If constants aren't valid addresses, reload the constant address
5245 into a register. */
5246 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5248 enum machine_mode address_mode = GET_MODE (ad);
5249 if (address_mode == VOIDmode)
5250 address_mode = targetm.addr_space.address_mode (as);
5252 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5253 Unshare it so we can safely alter it. */
5254 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5255 && CONSTANT_POOL_ADDRESS_P (ad))
5257 *memrefloc = copy_rtx (*memrefloc);
5258 loc = &XEXP (*memrefloc, 0);
5259 if (removed_and)
5260 loc = &XEXP (*loc, 0);
5263 find_reloads_address_part (ad, loc,
5264 base_reg_class (mode, as, MEM, SCRATCH),
5265 address_mode, opnum, type, ind_levels);
5266 return ! removed_and;
5269 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5270 opnum, type, ind_levels, insn);
5273 /* Find all pseudo regs appearing in AD
5274 that are eliminable in favor of equivalent values
5275 and do not have hard regs; replace them by their equivalents.
5276 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5277 front of it for pseudos that we have to replace with stack slots. */
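/* For illustration (hypothetical equivalence): given
   (plus (reg 117) (const_int 4)) where pseudo 117 received no hard
   register and reg_equiv_constant (117) is (symbol_ref "x"), the result
   is (plus (symbol_ref "x") (const_int 4)).  A pseudo equivalent to a
   stack slot is instead replaced by a fresh memory location from
   make_memloc, with a QImode USE emitted before INSN.  */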
5279 static rtx
5280 subst_reg_equivs (rtx ad, rtx insn)
5282 RTX_CODE code = GET_CODE (ad);
5283 int i;
5284 const char *fmt;
5286 switch (code)
5288 case HIGH:
5289 case CONST:
5290 CASE_CONST_ANY:
5291 case SYMBOL_REF:
5292 case LABEL_REF:
5293 case PC:
5294 case CC0:
5295 return ad;
5297 case REG:
5299 int regno = REGNO (ad);
5301 if (reg_equiv_constant (regno) != 0)
5303 subst_reg_equivs_changed = 1;
5304 return reg_equiv_constant (regno);
5306 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5308 rtx mem = make_memloc (ad, regno);
5309 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5311 subst_reg_equivs_changed = 1;
5312 /* We mark the USE with QImode so that we recognize it
5313 as one that can be safely deleted at the end of
5314 reload. */
5315 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5316 QImode);
5317 return mem;
5321 return ad;
5323 case PLUS:
5324 /* Quickly dispose of a common case. */
5325 if (XEXP (ad, 0) == frame_pointer_rtx
5326 && CONST_INT_P (XEXP (ad, 1)))
5327 return ad;
5328 break;
5330 default:
5331 break;
5334 fmt = GET_RTX_FORMAT (code);
5335 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5336 if (fmt[i] == 'e')
5337 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5338 return ad;
5341 /* Compute the sum of X and Y, making canonicalizations assumed in an
5342 address, namely: sum constant integers, surround the sum of two
5343 constants with a CONST, put the constant as the second operand, and
5344 group the constant on the outermost sum.
5346 This routine assumes both inputs are already in canonical form. */
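/* Examples of the canonicalization (hypothetical operands):
   form_sum (Pmode, (const_int 4), (reg 1))
     yields (plus (reg 1) (const_int 4)), placing the constant second;
   form_sum (Pmode, (plus (reg 1) (const_int 4)), (const_int 8))
     yields (plus (reg 1) (const_int 12)), folding the integers.  */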
5349 form_sum (enum machine_mode mode, rtx x, rtx y)
5351 rtx tem;
5353 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5354 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5356 if (CONST_INT_P (x))
5357 return plus_constant (mode, y, INTVAL (x));
5358 else if (CONST_INT_P (y))
5359 return plus_constant (mode, x, INTVAL (y));
5360 else if (CONSTANT_P (x))
5361 tem = x, x = y, y = tem;
5363 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5364 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5366 /* Note that if the operands of Y are specified in the opposite
5367 order in the recursive calls below, infinite recursion will occur. */
5368 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5369 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5371 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5372 constant will have been placed second. */
5373 if (CONSTANT_P (x) && CONSTANT_P (y))
5375 if (GET_CODE (x) == CONST)
5376 x = XEXP (x, 0);
5377 if (GET_CODE (y) == CONST)
5378 y = XEXP (y, 0);
5380 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5383 return gen_rtx_PLUS (mode, x, y);
5386 /* If ADDR is a sum containing a pseudo register that should be
5387 replaced with a constant (from reg_equiv_constant),
5388 return the result of doing so, and also apply the associative
5389 law so that the result is more likely to be a valid address.
5390 (But it is not guaranteed to be one.)
5392 Note that at most one register is replaced, even if more are
5393 replaceable. Also, we try to put the result into a canonical form
5394 so it is more likely to be a valid address.
5396 In all other cases, return ADDR. */
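/* For illustration (assuming pseudo 117 received no hard register): if
   ADDR is (plus (plus (reg 1) (reg 117)) (const_int 4)) and pseudo 117
   is equivalent to (const_int 8), the substitution and re-association
   yield (plus (reg 1) (const_int 12)).  */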
5398 static rtx
5399 subst_indexed_address (rtx addr)
5401 rtx op0 = 0, op1 = 0, op2 = 0;
5402 rtx tem;
5403 int regno;
5405 if (GET_CODE (addr) == PLUS)
5407 /* Try to find a register to replace. */
5408 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5409 if (REG_P (op0)
5410 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5411 && reg_renumber[regno] < 0
5412 && reg_equiv_constant (regno) != 0)
5413 op0 = reg_equiv_constant (regno);
5414 else if (REG_P (op1)
5415 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5416 && reg_renumber[regno] < 0
5417 && reg_equiv_constant (regno) != 0)
5418 op1 = reg_equiv_constant (regno);
5419 else if (GET_CODE (op0) == PLUS
5420 && (tem = subst_indexed_address (op0)) != op0)
5421 op0 = tem;
5422 else if (GET_CODE (op1) == PLUS
5423 && (tem = subst_indexed_address (op1)) != op1)
5424 op1 = tem;
5425 else
5426 return addr;
5428 /* Pick out up to three things to add. */
5429 if (GET_CODE (op1) == PLUS)
5430 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5431 else if (GET_CODE (op0) == PLUS)
5432 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5434 /* Compute the sum. */
5435 if (op2 != 0)
5436 op1 = form_sum (GET_MODE (addr), op1, op2);
5437 if (op1 != 0)
5438 op0 = form_sum (GET_MODE (addr), op0, op1);
5440 return op0;
5442 return addr;
5445 /* Update the REG_INC notes for an insn. It updates all REG_INC
5446 notes for the instruction which refer to REGNO so that they refer
5447 to the reload number.
5449 INSN is the insn for which any REG_INC notes need updating.
5451 REGNO is the register number which has been reloaded.
5453 RELOADNUM is the reload number. */
5455 static void
5456 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5457 int reloadnum ATTRIBUTE_UNUSED)
5459 #ifdef AUTO_INC_DEC
5460 rtx link;
5462 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5463 if (REG_NOTE_KIND (link) == REG_INC
5464 && (int) REGNO (XEXP (link, 0)) == regno)
5465 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5466 #endif
5469 /* Record the pseudo registers we must reload into hard registers in a
5470 subexpression of a would-be memory address, X referring to a value
5471 in mode MODE. (This function is not called if the address we find
5472 is strictly valid.)
5474 CONTEXT = 1 means we are considering regs as index regs,
5475 = 0 means we are considering them as base regs.
5476 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5477 or an autoinc code.
5478 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5479 is the code of the index part of the address. Otherwise, pass SCRATCH
5480 for this argument.
5481 OPNUM and TYPE specify the purpose of any reloads made.
5483 IND_LEVELS says how many levels of indirect addressing are
5484 supported at this point in the address.
5486 INSN, if nonzero, is the insn in which we do the reload. It is used
5487 to determine if we may generate output reloads.
5489 We return nonzero if X, as a whole, is reloaded or replaced. */
5491 /* Note that we take shortcuts assuming that no multi-reg machine mode
5492 occurs as part of an address.
5493 Also, this is not fully machine-customizable; it works for machines
5494 such as VAXen and 68000's and 32000's, but other possible machines
5495 could have addressing modes that this does not handle right.
5496 If you add push_reload calls here, you need to make sure gen_reload
5497 handles those cases gracefully. */
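/* For illustration (hypothetical RTL): in an address such as
   (plus (mult (reg 118) (const_int 4)) (reg 117)), the MULT operand is
   processed with CONTEXT == 1 (as an index register) and the other
   operand with CONTEXT == 0 (as a base register), so each pseudo gets
   reloaded into a register class suited to its role.  */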
5499 static int
5500 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5501 rtx x, int context,
5502 enum rtx_code outer_code, enum rtx_code index_code,
5503 rtx *loc, int opnum, enum reload_type type,
5504 int ind_levels, rtx insn)
5506 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5507 ((CONTEXT) == 0 \
5508 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5509 : REGNO_OK_FOR_INDEX_P (REGNO))
5511 enum reg_class context_reg_class;
5512 RTX_CODE code = GET_CODE (x);
5513 bool reloaded_inner_of_autoinc = false;
5515 if (context == 1)
5516 context_reg_class = INDEX_REG_CLASS;
5517 else
5518 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5520 switch (code)
5522 case PLUS:
5524 rtx orig_op0 = XEXP (x, 0);
5525 rtx orig_op1 = XEXP (x, 1);
5526 RTX_CODE code0 = GET_CODE (orig_op0);
5527 RTX_CODE code1 = GET_CODE (orig_op1);
5528 rtx op0 = orig_op0;
5529 rtx op1 = orig_op1;
5531 if (GET_CODE (op0) == SUBREG)
5533 op0 = SUBREG_REG (op0);
5534 code0 = GET_CODE (op0);
5535 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5536 op0 = gen_rtx_REG (word_mode,
5537 (REGNO (op0) +
5538 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5539 GET_MODE (SUBREG_REG (orig_op0)),
5540 SUBREG_BYTE (orig_op0),
5541 GET_MODE (orig_op0))));
5544 if (GET_CODE (op1) == SUBREG)
5546 op1 = SUBREG_REG (op1);
5547 code1 = GET_CODE (op1);
5548 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5549 /* ??? Why is this given op1's mode, while for op0
5550 ??? SUBREGs above we use word_mode? */
5551 op1 = gen_rtx_REG (GET_MODE (op1),
5552 (REGNO (op1) +
5553 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5554 GET_MODE (SUBREG_REG (orig_op1)),
5555 SUBREG_BYTE (orig_op1),
5556 GET_MODE (orig_op1))));
5558 /* A PLUS in the index register may be created only as a result of
5559 register rematerialization for an expression like &localvar*4. Reload it.
5560 It may be possible to combine the displacement on the outer level,
5561 but it is probably not worthwhile to do so. */
5562 if (context == 1)
5564 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5565 opnum, ADDR_TYPE (type), ind_levels, insn);
5566 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5567 context_reg_class,
5568 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5569 return 1;
5572 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5573 || code0 == ZERO_EXTEND || code1 == MEM)
5575 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5576 &XEXP (x, 0), opnum, type, ind_levels,
5577 insn);
5578 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5579 &XEXP (x, 1), opnum, type, ind_levels,
5580 insn);
5583 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5584 || code1 == ZERO_EXTEND || code0 == MEM)
5586 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5587 &XEXP (x, 0), opnum, type, ind_levels,
5588 insn);
5589 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5590 &XEXP (x, 1), opnum, type, ind_levels,
5591 insn);
5594 else if (code0 == CONST_INT || code0 == CONST
5595 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5596 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5597 &XEXP (x, 1), opnum, type, ind_levels,
5598 insn);
5600 else if (code1 == CONST_INT || code1 == CONST
5601 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5602 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5603 &XEXP (x, 0), opnum, type, ind_levels,
5604 insn);
5606 else if (code0 == REG && code1 == REG)
5608 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5609 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5610 return 0;
5611 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5612 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5613 return 0;
5614 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5615 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5616 &XEXP (x, 1), opnum, type, ind_levels,
5617 insn);
5618 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5619 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5620 &XEXP (x, 0), opnum, type, ind_levels,
5621 insn);
5622 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5623 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5624 &XEXP (x, 0), opnum, type, ind_levels,
5625 insn);
5626 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5627 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5628 &XEXP (x, 1), opnum, type, ind_levels,
5629 insn);
5630 else
5632 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5633 &XEXP (x, 0), opnum, type, ind_levels,
5634 insn);
5635 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5636 &XEXP (x, 1), opnum, type, ind_levels,
5637 insn);
5641 else if (code0 == REG)
5643 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5644 &XEXP (x, 0), opnum, type, ind_levels,
5645 insn);
5646 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5647 &XEXP (x, 1), opnum, type, ind_levels,
5648 insn);
5651 else if (code1 == REG)
5653 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5654 &XEXP (x, 1), opnum, type, ind_levels,
5655 insn);
5656 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5657 &XEXP (x, 0), opnum, type, ind_levels,
5658 insn);
5662 return 0;
5664 case POST_MODIFY:
5665 case PRE_MODIFY:
5667 rtx op0 = XEXP (x, 0);
5668 rtx op1 = XEXP (x, 1);
5669 enum rtx_code index_code;
5670 int regno;
5671 int reloadnum;
5673 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5674 return 0;
5676 /* Currently, we only support {PRE,POST}_MODIFY constructs
5677 where a base register is {inc,dec}remented by the contents
5678 of another register or by a constant value. Thus, these
5679 operands must match. */
5680 gcc_assert (op0 == XEXP (op1, 0));
5682 /* Require index register (or constant). Let's just handle the
5683 register case in the meantime... If the target allows
5684 auto-modify by a constant then we could try replacing a pseudo
5685 register with its equivalent constant where applicable.
5687 We also handle the case where the register was eliminated
5688 resulting in a PLUS subexpression.
5690 If we later decide to reload the whole PRE_MODIFY or
5691 POST_MODIFY, inc_for_reload might clobber the reload register
5692 before reading the index. The index register might therefore
5693 need to live longer than a TYPE reload normally would, so be
5694 conservative and class it as RELOAD_OTHER. */
5695 if ((REG_P (XEXP (op1, 1))
5696 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5697 || GET_CODE (XEXP (op1, 1)) == PLUS)
5698 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5699 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5700 ind_levels, insn);
5702 gcc_assert (REG_P (XEXP (op1, 0)));
5704 regno = REGNO (XEXP (op1, 0));
5705 index_code = GET_CODE (XEXP (op1, 1));
5707 /* A register that is incremented cannot be constant! */
5708 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5709 || reg_equiv_constant (regno) == 0);
5711 /* Handle a register that is equivalent to a memory location
5712 which cannot be addressed directly. */
5713 if (reg_equiv_memory_loc (regno) != 0
5714 && (reg_equiv_address (regno) != 0
5715 || num_not_at_initial_offset))
5717 rtx tem = make_memloc (XEXP (x, 0), regno);
5719 if (reg_equiv_address (regno)
5720 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5722 rtx orig = tem;
5724 /* First reload the memory location's address.
5725 We can't use ADDR_TYPE (type) here, because we need to
5726 write back the value after reading it, hence we actually
5727 need two registers. */
5728 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5729 &XEXP (tem, 0), opnum,
5730 RELOAD_OTHER,
5731 ind_levels, insn);
5733 if (!rtx_equal_p (tem, orig))
5734 push_reg_equiv_alt_mem (regno, tem);
5736 /* Then reload the memory location into a base
5737 register. */
5738 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5739 &XEXP (op1, 0),
5740 base_reg_class (mode, as,
5741 code, index_code),
5742 GET_MODE (x), GET_MODE (x), 0,
5743 0, opnum, RELOAD_OTHER);
5745 update_auto_inc_notes (this_insn, regno, reloadnum);
5746 return 0;
5750 if (reg_renumber[regno] >= 0)
5751 regno = reg_renumber[regno];
5753 /* We require a base register here... */
5754 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5756 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5757 &XEXP (op1, 0), &XEXP (x, 0),
5758 base_reg_class (mode, as,
5759 code, index_code),
5760 GET_MODE (x), GET_MODE (x), 0, 0,
5761 opnum, RELOAD_OTHER);
5763 update_auto_inc_notes (this_insn, regno, reloadnum);
5764 return 0;
5767 return 0;
5769 case POST_INC:
5770 case POST_DEC:
5771 case PRE_INC:
5772 case PRE_DEC:
5773 if (REG_P (XEXP (x, 0)))
5775 int regno = REGNO (XEXP (x, 0));
5776 int value = 0;
5777 rtx x_orig = x;
5779 /* A register that is incremented cannot be constant! */
5780 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5781 || reg_equiv_constant (regno) == 0);
5783 /* Handle a register that is equivalent to a memory location
5784 which cannot be addressed directly. */
5785 if (reg_equiv_memory_loc (regno) != 0
5786 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5788 rtx tem = make_memloc (XEXP (x, 0), regno);
5789 if (reg_equiv_address (regno)
5790 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5792 rtx orig = tem;
5794 /* First reload the memory location's address.
5795 We can't use ADDR_TYPE (type) here, because we need to
5796 write back the value after reading it, hence we actually
5797 need two registers. */
5798 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5799 &XEXP (tem, 0), opnum, type,
5800 ind_levels, insn);
5801 reloaded_inner_of_autoinc = true;
5802 if (!rtx_equal_p (tem, orig))
5803 push_reg_equiv_alt_mem (regno, tem);
5804 /* Put this inside a new increment-expression. */
5805 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5806 /* Proceed to reload that, as if it contained a register. */
5810 /* If we have a hard register that is ok in this incdec context,
5811 don't make a reload. If the register isn't nice enough for
5812 autoincdec, we can reload it. But if an autoincrement of a
5813 register that we have verified here as acceptable is still not
5814 "valid" in the enclosing context, then no autoincrement is "valid".
5815 If that is true and something made an autoincrement anyway,
5816 this must be a special context where one is allowed.
5817 (For example, a "push" instruction.)
5818 We can't improve this address, so leave it alone. */
5820 /* Otherwise, reload the autoincrement into a suitable hard reg
5821 and record how much to increment by. */
5823 if (reg_renumber[regno] >= 0)
5824 regno = reg_renumber[regno];
5825 if (regno >= FIRST_PSEUDO_REGISTER
5826 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5827 index_code))
5829 int reloadnum;
5831 /* If we can output the register afterwards, do so, this
5832 saves the extra update.
5833 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5834 CALL_INSN - and it does not set CC0.
5835 But don't do this if we cannot directly address the
5836 memory location, since this will make it harder to
5837 reuse address reloads, and increases register pressure.
5838 Also don't do this if we can probably update x directly. */
5839 rtx equiv = (MEM_P (XEXP (x, 0))
5840 ? XEXP (x, 0)
5841 : reg_equiv_mem (regno));
5842 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5843 if (insn && NONJUMP_INSN_P (insn) && equiv
5844 && memory_operand (equiv, GET_MODE (equiv))
5845 #ifdef HAVE_cc0
5846 && ! sets_cc0_p (PATTERN (insn))
5847 #endif
5848 && ! (icode != CODE_FOR_nothing
5849 && insn_operand_matches (icode, 0, equiv)
5850 && insn_operand_matches (icode, 1, equiv))
5851 /* Using RELOAD_OTHER means we emit this and the reload we
5852 made earlier in the wrong order. */
5853 && !reloaded_inner_of_autoinc)
5855 /* We use the original pseudo for loc, so that
5856 emit_reload_insns() knows which pseudo this
5857 reload refers to and updates the pseudo rtx, not
5858 its equivalent memory location, as well as the
5859 corresponding entry in reg_last_reload_reg. */
5860 loc = &XEXP (x_orig, 0);
5861 x = XEXP (x, 0);
5862 reloadnum
5863 = push_reload (x, x, loc, loc,
5864 context_reg_class,
5865 GET_MODE (x), GET_MODE (x), 0, 0,
5866 opnum, RELOAD_OTHER);
5868 else
5870 reloadnum
5871 = push_reload (x, x, loc, (rtx*) 0,
5872 context_reg_class,
5873 GET_MODE (x), GET_MODE (x), 0, 0,
5874 opnum, type);
5875 rld[reloadnum].inc
5876 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5878 value = 1;
5881 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5882 reloadnum);
5884 return value;
5886 return 0;
5888 case TRUNCATE:
5889 case SIGN_EXTEND:
5890 case ZERO_EXTEND:
5891 /* Look for parts to reload in the inner expression and reload them
5892 too, in addition to this operation. Reloading all inner parts in
5893 addition to this one shouldn't be necessary, but at this point,
5894 we don't know if we can possibly omit any part that *can* be
5895 reloaded. Targets that are better off reloading just either part
5896 (or perhaps even a different part of an outer expression), should
5897 define LEGITIMIZE_RELOAD_ADDRESS. */
5898 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5899 context, code, SCRATCH, &XEXP (x, 0), opnum,
5900 type, ind_levels, insn);
5901 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5902 context_reg_class,
5903 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5904 return 1;
5906 case MEM:
5907 /* This is probably the result of a substitution, by eliminate_regs, of
5908 an equivalent address for a pseudo that was not allocated to a hard
5909 register. Verify that the specified address is valid and reload it
5910 into a register.
5912 Since we know we are going to reload this item, don't decrement for
5913 the indirection level.
5915 Note that this is actually conservative: it would be slightly more
5916 efficient to use the value of SPILL_INDIRECT_LEVELS from
5917 reload1.c here. */
5919 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5920 opnum, ADDR_TYPE (type), ind_levels, insn);
5921 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5922 context_reg_class,
5923 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5924 return 1;
5926 case REG:
5928 int regno = REGNO (x);
5930 if (reg_equiv_constant (regno) != 0)
5932 find_reloads_address_part (reg_equiv_constant (regno), loc,
5933 context_reg_class,
5934 GET_MODE (x), opnum, type, ind_levels);
5935 return 1;
5938 #if 0 /* This might break the code in reload1.c that deletes a prior
5939 output reload feeding this insn. */
5940 if (reg_equiv_mem (regno) != 0)
5942 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5943 context_reg_class,
5944 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5945 return 1;
5947 #endif
5949 if (reg_equiv_memory_loc (regno)
5950 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5952 rtx tem = make_memloc (x, regno);
5953 if (reg_equiv_address (regno) != 0
5954 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5956 x = tem;
5957 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5958 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5959 ind_levels, insn);
5960 if (!rtx_equal_p (x, tem))
5961 push_reg_equiv_alt_mem (regno, x);
5965 if (reg_renumber[regno] >= 0)
5966 regno = reg_renumber[regno];
5968 if (regno >= FIRST_PSEUDO_REGISTER
5969 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5970 index_code))
5972 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5973 context_reg_class,
5974 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5975 return 1;
5978 /* If a register appearing in an address is the subject of a CLOBBER
5979 in this insn, reload it into some other register to be safe.
5980 The CLOBBER is supposed to make the register unavailable
5981 from before this insn to after it. */
5982 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5984 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5985 context_reg_class,
5986 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5987 return 1;
5990 return 0;
5992 case SUBREG:
5993 if (REG_P (SUBREG_REG (x)))
5995 /* If this is a SUBREG of a hard register and the resulting register
5996 is of the wrong class, reload the whole SUBREG. This avoids
5997 needless copies if SUBREG_REG is multi-word. */
5998 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6000 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6002 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6003 index_code))
6005 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6006 context_reg_class,
6007 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6008 return 1;
6011 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6012 is larger than the class size, then reload the whole SUBREG. */
6013 else
6015 enum reg_class rclass = context_reg_class;
6016 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6017 > reg_class_size[(int) rclass])
6019 /* If the inner register will be replaced by a memory
6020 reference, we can do this only if we can replace the
6021 whole subreg by a (narrower) memory reference. If
6022 this is not possible, fall through and reload just
6023 the inner register (including address reloads). */
6024 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6026 rtx tem = find_reloads_subreg_address (x, opnum,
6027 ADDR_TYPE (type),
6028 ind_levels, insn,
6029 NULL);
6030 if (tem)
6032 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6033 GET_MODE (tem), VOIDmode, 0, 0,
6034 opnum, type);
6035 return 1;
6038 else
6040 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6041 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6042 return 1;
6047 break;
6049 default:
6050 break;
6054 const char *fmt = GET_RTX_FORMAT (code);
6055 int i;
6057 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6059 if (fmt[i] == 'e')
6060 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6061 we get here. */
6062 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6063 code, SCRATCH, &XEXP (x, i),
6064 opnum, type, ind_levels, insn);
6068 #undef REG_OK_FOR_CONTEXT
6069 return 0;
6072 /* X, which is found at *LOC, is a part of an address that needs to be
6073 reloaded into a register of class RCLASS. If X is a constant, or if
6074 X is a PLUS that contains a constant, check that the constant is a
6075 legitimate operand and that we are supposed to be able to load
6076 it into the register.
6078 If not, force the constant into memory and reload the MEM instead.
6080 MODE is the mode to use, in case X is an integer constant.
6082 OPNUM and TYPE describe the purpose of any reloads made.
6084 IND_LEVELS says how many levels of indirect addressing this machine
6085 supports. */
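/* For illustration: if X is a SYMBOL_REF that the target does not accept
   as a legitimate constant for MODE, it is forced into the constant pool
   and the resulting MEM, together with any reloads its own address
   needs, is what gets reloaded into RCLASS instead.  */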
6087 static void
6088 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6089 enum machine_mode mode, int opnum,
6090 enum reload_type type, int ind_levels)
6092 if (CONSTANT_P (x)
6093 && (!targetm.legitimate_constant_p (mode, x)
6094 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6096 x = force_const_mem (mode, x);
6097 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6098 opnum, type, ind_levels, 0);
6101 else if (GET_CODE (x) == PLUS
6102 && CONSTANT_P (XEXP (x, 1))
6103 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6104 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6105 == NO_REGS))
6107 rtx tem;
6109 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6110 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6111 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6112 opnum, type, ind_levels, 0);
6115 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6116 mode, VOIDmode, 0, 0, opnum, type);
6119 /* X, a subreg of a pseudo, is a part of an address that needs to be
6120 reloaded, and the pseudo is equivalent to a memory location.
6122 Attempt to replace the whole subreg by a (possibly narrower or wider)
6123 memory reference. If this is possible, return this new memory
6124 reference, and push all required address reloads. Otherwise,
6125 return NULL.
6127 OPNUM and TYPE identify the purpose of the reload.
6129 IND_LEVELS says how many levels of indirect addressing are
6130 supported at this point in the address.
6132 INSN, if nonzero, is the insn in which we do the reload. It is used
6133 to determine where to put USEs for pseudos that we have to replace with
6134 stack slots. */
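/* For illustration (hypothetical pseudo and stack slot): given
   (subreg:HI (reg:SI 117) 2) where pseudo 117 is equivalent to
   (mem:SI (plus (reg fp) (const_int -8))), simplify_subreg can produce
   (mem:HI (plus (reg fp) (const_int -6))), and any reloads needed by
   that narrower reference's address are then pushed.  */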
6136 static rtx
6137 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6138 int ind_levels, rtx insn, int *address_reloaded)
6140 enum machine_mode outer_mode = GET_MODE (x);
6141 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6142 int regno = REGNO (SUBREG_REG (x));
6143 int reloaded = 0;
6144 rtx tem, orig;
6145 int offset;
6147 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6149 /* We cannot replace the subreg with a modified memory reference if:
6151 - we have a paradoxical subreg that implicitly acts as a zero or
6152 sign extension operation due to LOAD_EXTEND_OP;
6154 - we have a subreg that is implicitly supposed to act on the full
6155 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6157 - the address of the equivalent memory location is mode-dependent; or
6159 - we have a paradoxical subreg and the resulting memory is not
6160 sufficiently aligned to allow access in the wider mode.
6162 In addition, we choose not to perform the replacement for *any*
6163 paradoxical subreg, even if it were possible in principle. This
6164 is to avoid generating wider memory references than necessary.
6166 This corresponds to how previous versions of reload used to handle
6167 paradoxical subregs where no address reload was required. */
6169 if (paradoxical_subreg_p (x))
6170 return NULL;
6172 #ifdef WORD_REGISTER_OPERATIONS
6173 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6174 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6175 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6176 return NULL;
6177 #endif
6179 /* Since we don't attempt to handle paradoxical subregs, we can just
6180 call into simplify_subreg, which will handle all remaining checks
6181 for us. */
6182 orig = make_memloc (SUBREG_REG (x), regno);
6183 offset = SUBREG_BYTE (x);
6184 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6185 if (!tem || !MEM_P (tem))
6186 return NULL;
6188 /* Now push all required address reloads, if any. */
6189 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6190 XEXP (tem, 0), &XEXP (tem, 0),
6191 opnum, type, ind_levels, insn);
6192 /* ??? Do we need to handle nonzero offsets somehow? */
6193 if (!offset && !rtx_equal_p (tem, orig))
6194 push_reg_equiv_alt_mem (regno, tem);
6196 /* For some processors an address may be valid in the original mode but
6197 not in a smaller mode. For example, ARM accepts a scaled index register
6198 in SImode but not in HImode. Note that this is only a problem if the
6199 address in reg_equiv_mem is already invalid in the new mode; other
6200 cases would be fixed by find_reloads_address as usual.
6202 ??? We attempt to handle such cases here by doing an additional reload
6203 of the full address after the usual processing by find_reloads_address.
6204 Note that this may not work in the general case, but it seems to cover
6205 the cases where this situation currently occurs. A more general fix
6206 might be to reload the *value* instead of the address, but this would
6207 not be expected by the callers of this routine as-is.
6209 If find_reloads_address already completely replaced the address, there
6210 is nothing further to do. */
6211 if (reloaded == 0
6212 && reg_equiv_mem (regno) != 0
6213 && !strict_memory_address_addr_space_p
6214 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6215 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6217 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6218 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6219 MEM, SCRATCH),
6220 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6221 reloaded = 1;
6224 /* If this is not a toplevel operand, find_reloads doesn't see this
6225 substitution. We have to emit a USE of the pseudo so that
6226 delete_output_reload can see it. */
6227 if (replace_reloads && recog_data.operand[opnum] != x)
6228 /* We mark the USE with QImode so that we recognize it as one that
6229 can be safely deleted at the end of reload. */
6230 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6231 QImode);
6233 if (address_reloaded)
6234 *address_reloaded = reloaded;
6236 return tem;
6239 /* Substitute into the current INSN the registers into which we have reloaded
6240 the things that need reloading. The array `replacements'
6241 contains the locations of all pointers that must be changed
6242 and says what to replace them with.
6244 Each replacement is applied in place at its recorded location.  */
6246 void
6247 subst_reloads (rtx insn)
6249 int i;
6251 for (i = 0; i < n_replacements; i++)
6253 struct replacement *r = &replacements[i];
6254 rtx reloadreg = rld[r->what].reg_rtx;
6255 if (reloadreg)
6257 #ifdef DEBUG_RELOAD
6258 /* This checking takes a very long time on some platforms
6259 causing the gcc.c-torture/compile/limits-fnargs.c test
6260 to time out during testing. See PR 31850.
6262 Internal consistency test. Check that we don't modify
6263 anything in the equivalence arrays. Whenever something from
6264 those arrays needs to be reloaded, it must be unshared before
6265 being substituted into; the equivalence must not be modified.
6266 Otherwise, if the equivalence is used after that, it will
6267 have been modified, and the thing substituted (probably a
6268 register) is likely overwritten and not a usable equivalence. */
6269 int check_regno;
6271 for (check_regno = 0; check_regno < max_regno; check_regno++)
6273 #define CHECK_MODF(ARRAY) \
6274 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6275 || !loc_mentioned_in_p (r->where, \
6276 (*reg_equivs)[check_regno].ARRAY))
6278 CHECK_MODF (constant);
6279 CHECK_MODF (memory_loc);
6280 CHECK_MODF (address);
6281 CHECK_MODF (mem);
6282 #undef CHECK_MODF
6284 #endif /* DEBUG_RELOAD */
6286 /* If we're replacing a LABEL_REF with a register, there must
6287 already be an indication (to e.g. flow) which label this
6288 register refers to. */
6289 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6290 || !JUMP_P (insn)
6291 || find_reg_note (insn,
6292 REG_LABEL_OPERAND,
6293 XEXP (*r->where, 0))
6294 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6296 /* Encapsulate RELOADREG so its machine mode matches what
6297 used to be there. Note that gen_lowpart_common will
6298 do the wrong thing if RELOADREG is multi-word. RELOADREG
6299 will always be a REG here. */
6300 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6301 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6303 *r->where = reloadreg;
6305 /* If reload got no reg and isn't optional, something's wrong. */
6306 else
6307 gcc_assert (rld[r->what].optional);
6311 /* Make a copy of any replacements being done into X and move those
6312 copies to locations in Y, a copy of X. */
6314 void
6315 copy_replacements (rtx x, rtx y)
6317 copy_replacements_1 (&x, &y, n_replacements);
6320 static void
6321 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6323 int i, j;
6324 rtx x, y;
6325 struct replacement *r;
6326 enum rtx_code code;
6327 const char *fmt;
6329 for (j = 0; j < orig_replacements; j++)
6330 if (replacements[j].where == px)
6332 r = &replacements[n_replacements++];
6333 r->where = py;
6334 r->what = replacements[j].what;
6335 r->mode = replacements[j].mode;
6338 x = *px;
6339 y = *py;
6340 code = GET_CODE (x);
6341 fmt = GET_RTX_FORMAT (code);
6343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6345 if (fmt[i] == 'e')
6346 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6347 else if (fmt[i] == 'E')
6348 for (j = XVECLEN (x, i); --j >= 0; )
6349 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6350 orig_replacements);
6354 /* Change any replacements being done to *X to be done to *Y. */
6356 void
6357 move_replacements (rtx *x, rtx *y)
6359 int i;
6361 for (i = 0; i < n_replacements; i++)
6362 if (replacements[i].where == x)
6363 replacements[i].where = y;
6366 /* If LOC was scheduled to be replaced by something, return the replacement.
6367 Otherwise, return *LOC. */
6370 find_replacement (rtx *loc)
6372 struct replacement *r;
6374 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6376 rtx reloadreg = rld[r->what].reg_rtx;
6378 if (reloadreg && r->where == loc)
6380 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6381 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6383 return reloadreg;
6385 else if (reloadreg && GET_CODE (*loc) == SUBREG
6386 && r->where == &SUBREG_REG (*loc))
6388 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6389 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6391 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6392 GET_MODE (SUBREG_REG (*loc)),
6393 SUBREG_BYTE (*loc));
6397 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6398 what's inside and make a new rtl if so. */
6399 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6400 || GET_CODE (*loc) == MULT)
6402 rtx x = find_replacement (&XEXP (*loc, 0));
6403 rtx y = find_replacement (&XEXP (*loc, 1));
6405 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6406 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6409 return *loc;
6412 /* Return nonzero if register in range [REGNO, ENDREGNO)
6413 appears either explicitly or implicitly in X
6414 other than being stored into (except for earlyclobber operands).
6416 References contained within the substructure at LOC do not count.
6417 LOC may be zero, meaning don't ignore anything.
6419 This is similar to refers_to_regno_p in rtlanal.c except that we
6420 look at equivalences for pseudos that didn't get hard registers. */
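/* For illustration: if X contains pseudo 117, which received no hard
   register and is equivalent to (mem (plus (reg fp) (const_int -8))),
   a query about the frame pointer's register number looks inside that
   equivalence and reports the reference, which plain refers_to_regno_p
   would miss.  */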
6422 static int
6423 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6424 rtx x, rtx *loc)
6426 int i;
6427 unsigned int r;
6428 RTX_CODE code;
6429 const char *fmt;
6431 if (x == 0)
6432 return 0;
6434 repeat:
6435 code = GET_CODE (x);
6437 switch (code)
6439 case REG:
6440 r = REGNO (x);
6442 /* If this is a pseudo, a hard register must not have been allocated.
6443 X must therefore either be a constant or be in memory. */
6444 if (r >= FIRST_PSEUDO_REGISTER)
6446 if (reg_equiv_memory_loc (r))
6447 return refers_to_regno_for_reload_p (regno, endregno,
6448 reg_equiv_memory_loc (r),
6449 (rtx*) 0);
6451 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6452 return 0;
6455 return (endregno > r
6456 && regno < r + (r < FIRST_PSEUDO_REGISTER
6457 ? hard_regno_nregs[r][GET_MODE (x)]
6458 : 1));
6460 case SUBREG:
6461 /* If this is a SUBREG of a hard reg, we can see exactly which
6462 registers are being modified. Otherwise, handle normally. */
6463 if (REG_P (SUBREG_REG (x))
6464 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6466 unsigned int inner_regno = subreg_regno (x);
6467 unsigned int inner_endregno
6468 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6469 ? subreg_nregs (x) : 1);
6471 return endregno > inner_regno && regno < inner_endregno;
6473 break;
6475 case CLOBBER:
6476 case SET:
6477 if (&SET_DEST (x) != loc
6478 /* Note setting a SUBREG counts as referring to the REG it is in for
6479 a pseudo but not for hard registers since we can
6480 treat each word individually. */
6481 && ((GET_CODE (SET_DEST (x)) == SUBREG
6482 && loc != &SUBREG_REG (SET_DEST (x))
6483 && REG_P (SUBREG_REG (SET_DEST (x)))
6484 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6485 && refers_to_regno_for_reload_p (regno, endregno,
6486 SUBREG_REG (SET_DEST (x)),
6487 loc))
6488 /* If the output is an earlyclobber operand, this is
6489 a conflict. */
6490 || ((!REG_P (SET_DEST (x))
6491 || earlyclobber_operand_p (SET_DEST (x)))
6492 && refers_to_regno_for_reload_p (regno, endregno,
6493 SET_DEST (x), loc))))
6494 return 1;
6496 if (code == CLOBBER || loc == &SET_SRC (x))
6497 return 0;
6498 x = SET_SRC (x);
6499 goto repeat;
6501 default:
6502 break;
6505 /* X does not match, so try its subexpressions. */
6507 fmt = GET_RTX_FORMAT (code);
6508 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6510 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6512 if (i == 0)
6514 x = XEXP (x, 0);
6515 goto repeat;
6517 else
6518 if (refers_to_regno_for_reload_p (regno, endregno,
6519 XEXP (x, i), loc))
6520 return 1;
6522 else if (fmt[i] == 'E')
6524 int j;
6525 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6526 if (loc != &XVECEXP (x, i, j)
6527 && refers_to_regno_for_reload_p (regno, endregno,
6528 XVECEXP (x, i, j), loc))
6529 return 1;
6532 return 0;
6535 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6536 we check if any register number in X conflicts with the relevant register
6537 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6538 contains a MEM (we don't bother checking for memory addresses that can't
6539 conflict because we expect this to be a rare case).
6541 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6542 that we look at equivalences for pseudos that didn't get hard registers. */
6545 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6547 int regno, endregno;
6549 /* Overly conservative. */
6550 if (GET_CODE (x) == STRICT_LOW_PART
6551 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6552 x = XEXP (x, 0);
6554 /* If either argument is a constant, then modifying X cannot affect IN. */
6555 if (CONSTANT_P (x) || CONSTANT_P (in))
6556 return 0;
6557 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6558 return refers_to_mem_for_reload_p (in);
6559 else if (GET_CODE (x) == SUBREG)
6561 regno = REGNO (SUBREG_REG (x));
6562 if (regno < FIRST_PSEUDO_REGISTER)
6563 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6564 GET_MODE (SUBREG_REG (x)),
6565 SUBREG_BYTE (x),
6566 GET_MODE (x));
6567 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6568 ? subreg_nregs (x) : 1);
6570 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6572 else if (REG_P (x))
6574 regno = REGNO (x);
6576 /* If this is a pseudo, it must not have been assigned a hard register.
6577 Therefore, it must either be in memory or be a constant. */
6579 if (regno >= FIRST_PSEUDO_REGISTER)
6581 if (reg_equiv_memory_loc (regno))
6582 return refers_to_mem_for_reload_p (in);
6583 gcc_assert (reg_equiv_constant (regno));
6584 return 0;
6587 endregno = END_HARD_REGNO (x);
6589 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6591 else if (MEM_P (x))
6592 return refers_to_mem_for_reload_p (in);
6593 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6594 || GET_CODE (x) == CC0)
6595 return reg_mentioned_p (x, in);
6596 else
6598 gcc_assert (GET_CODE (x) == PLUS);
6600 /* We actually want to know if X is mentioned somewhere inside IN.
6601 We must not say that (plus (sp) (const_int 124)) is in
6602 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6603 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6604 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6605 while (MEM_P (in))
6606 in = XEXP (in, 0);
6607 if (REG_P (in))
6608 return 0;
6609 else if (GET_CODE (in) == PLUS)
6610 return (rtx_equal_p (x, in)
6611 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6612 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6613 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6614 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6617 gcc_unreachable ();
6620 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6621 registers. */
6623 static int
6624 refers_to_mem_for_reload_p (rtx x)
6626 const char *fmt;
6627 int i;
6629 if (MEM_P (x))
6630 return 1;
6632 if (REG_P (x))
6633 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6634 && reg_equiv_memory_loc (REGNO (x)));
6636 fmt = GET_RTX_FORMAT (GET_CODE (x));
6637 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6638 if (fmt[i] == 'e'
6639 && (MEM_P (XEXP (x, i))
6640 || refers_to_mem_for_reload_p (XEXP (x, i))))
6641 return 1;
6643 return 0;
6646 /* Check the insns before INSN to see if there is a suitable register
6647 containing the same value as GOAL.
6648 If OTHER is -1, look for a register in class RCLASS.
6649 Otherwise, just see if register number OTHER shares GOAL's value.
6651 Return an rtx for the register found, or zero if none is found.
6653 If RELOAD_REG_P is (short *)1,
6654 we reject any hard reg that appears in reload_reg_rtx
6655 because such a hard reg is also needed coming into this insn.
6657 If RELOAD_REG_P is any other nonzero value,
6658 it is a vector indexed by hard reg number
6659 and we reject any hard reg whose element in the vector is nonnegative
6660 as well as any that appears in reload_reg_rtx.
6662 If GOAL is zero, then GOALREG is a register number; we look
6663 for an equivalent for that register.
6665 MODE is the machine mode of the value we want an equivalence for.
6666 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6668 This function is used by jump.c as well as in the reload pass.
6670 If GOAL is the sum of the stack pointer and a constant, we treat it
6671 as if it were a constant except that sp is required to be unchanging. */
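/* For illustration: when a reload needs the value of pseudo 117 and an
   earlier insn (with no intervening label) copied that pseudo into hard
   register 3, this search can return (reg 3) so the existing copy is
   reused, provided neither the register nor the value has been
   clobbered in between.  */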
rtx
find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
                short *reload_reg_p, int goalreg, enum machine_mode mode)
{
  rtx p = insn;
  rtx goaltry, valtry, value, where;
  rtx pat;
  int regno = -1;
  int valueno;
  int goal_mem = 0;
  int goal_const = 0;
  int goal_mem_addr_varies = 0;
  int need_stable_sp = 0;
  int nregs;
  int valuenregs;
  int num = 0;

  if (goal == 0)
    regno = goalreg;
  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
    {
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
        return 0;
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
        return 0;
      /* An address with side effects must be reexecuted.  */
      switch (code)
        {
        case POST_INC:
        case PRE_INC:
        case POST_DEC:
        case PRE_DEC:
        case POST_MODIFY:
        case PRE_MODIFY:
          return 0;
        default:
          break;
        }
      goal_mem = 1;
    }
  else if (CONSTANT_P (goal))
    goal_const = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == stack_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == frame_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = 1;
  else
    return 0;

  num = 0;
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */

  while (1)
    {
      p = PREV_INSN (p);
      if (p && DEBUG_INSN_P (p))
        continue;
      num++;
      if (p == 0 || LABEL_P (p)
          || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
        return 0;

      /* Don't reuse register contents from before a setjmp-type
         function call; on the second return (from the longjmp) it
         might have been clobbered by a later reuse.  It doesn't
         seem worthwhile to actually go and see if it is actually
         reused even if that information would be readily available;
         just don't reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
        return 0;

      if (NONJUMP_INSN_P (p)
          /* If we don't want spill regs ...  */
          && (! (reload_reg_p != 0
                 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
              /* ... then ignore insns introduced by reload; they aren't
                 useful and can cause results in reload_as_needed to be
                 different from what they were when calculating the need for
                 spills.  If we notice an input-reload insn here, we will
                 reject it below, but it might hide a usable equivalent.
                 That makes bad code.  It may even fail: perhaps no reg was
                 spilled for this insn because it was assumed we would find
                 that equivalent.  */
              || INSN_UID (p) < reload_first_uid))
        {
          rtx tem;
          pat = single_set (p);

          /* First check for something that sets some reg equal to GOAL.  */
          if (pat != 0
              && ((regno >= 0
                   && true_regnum (SET_SRC (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  ||
                  (regno >= 0
                   && true_regnum (SET_DEST (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
                  ||
                  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
                   /* When looking for stack pointer + const,
                      make sure we don't use a stack adjust.  */
                   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
                  /* If we are looking for a constant,
                     and something equivalent to that constant was copied
                     into a reg, we can use that reg.  */
                  || (goal_const && REG_NOTES (p) != 0
                      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
                      && ((rtx_equal_p (XEXP (tem, 0), goal)
                           && (valueno
                               = true_regnum (valtry = SET_DEST (pat))) >= 0)
                          || (REG_P (SET_DEST (pat))
                              && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                              && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                              && CONST_INT_P (goal)
                              && 0 != (goaltry
                                       = operand_subword (XEXP (tem, 0), 0, 0,
                                                          VOIDmode))
                              && rtx_equal_p (goal, goaltry)
                              && (valtry
                                  = operand_subword (SET_DEST (pat), 0, 0,
                                                     VOIDmode))
                              && (valueno = true_regnum (valtry)) >= 0)))
                  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
                                                          NULL_RTX))
                      && REG_P (SET_DEST (pat))
                      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                      && CONST_INT_P (goal)
                      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
                                                          VOIDmode))
                      && rtx_equal_p (goal, goaltry)
                      && (valtry
                          = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
                      && (valueno = true_regnum (valtry)) >= 0)))
            {
              if (other >= 0)
                {
                  if (valueno != other)
                    continue;
                }
              else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
                continue;
              else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
                                           mode, valueno))
                continue;
              value = valtry;
              where = p;
              break;
            }
        }
    }

  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
    return 0;

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
                                                          goal)))
    need_stable_sp = 1;

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)
    return 0;

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
                                       goal, (rtx*) 0))
    return 0;

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  else
    nregs = 1;
  valuenregs = hard_regno_nregs[valueno][mode];

  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)
    return 0;

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
    {
      int i;
      for (i = 0; i < valuenregs; ++i)
        if (reload_reg_p[valueno + i] >= 0)
          return 0;
    }

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
    {
      int i;
      for (i = 0; i < n_reloads; i++)
        if (rld[i].reg_rtx != 0 && rld[i].in)
          {
            int regno1 = REGNO (rld[i].reg_rtx);
            int nregs1 = hard_regno_nregs[regno1]
                                         [GET_MODE (rld[i].reg_rtx)];
            if (regno1 < valueno + valuenregs
                && regno1 + nregs1 > valueno)
              return 0;
          }
    }

  if (goal_mem)
    /* We must treat frame pointer as varying here,
       since it can vary--in a nonlocal goto as generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));

  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  */

  p = insn;
  while (1)
    {
      p = PREV_INSN (p);
      if (p == where)
        return value;

      /* Don't trust the conversion past a function call
         if either of the two is in a call-clobbered register, or memory.  */
      if (CALL_P (p))
        {
          int i;

          if (goal_mem || need_stable_sp)
            return 0;

          if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < nregs; ++i)
              if (call_used_regs[regno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
                return 0;

          if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < valuenregs; ++i)
              if (call_used_regs[valueno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
                return 0;
        }

      if (INSN_P (p))
        {
          pat = PATTERN (p);

          /* Watch out for unspec_volatile, and volatile asms.  */
          if (volatile_insn_p (pat))
            return 0;

          /* If this insn P stores in either GOAL or VALUE, return 0.
             If GOAL is a memory ref and this insn writes memory, return 0.
             If GOAL is a memory ref and its address is not constant,
             and this insn P changes a register used in GOAL, return 0.  */

          if (GET_CODE (pat) == COND_EXEC)
            pat = COND_EXEC_CODE (pat);
          if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
            {
              rtx dest = SET_DEST (pat);
              while (GET_CODE (dest) == SUBREG
                     || GET_CODE (dest) == ZERO_EXTRACT
                     || GET_CODE (dest) == STRICT_LOW_PART)
                dest = XEXP (dest, 0);
              if (REG_P (dest))
                {
                  int xregno = REGNO (dest);
                  int xnregs;
                  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                  else
                    xnregs = 1;
                  if (xregno < regno + nregs && xregno + xnregs > regno)
                    return 0;
                  if (xregno < valueno + valuenregs
                      && xregno + xnregs > valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (dest, goal))
                    return 0;
                  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                    return 0;
                }
              else if (goal_mem && MEM_P (dest)
                       && ! push_operand (dest, GET_MODE (dest)))
                return 0;
              else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                       && reg_equiv_memory_loc (regno) != 0)
                return 0;
              else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
                return 0;
            }
          else if (GET_CODE (pat) == PARALLEL)
            {
              int i;
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  rtx v1 = XVECEXP (pat, 0, i);
                  if (GET_CODE (v1) == COND_EXEC)
                    v1 = COND_EXEC_CODE (v1);
                  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
                    {
                      rtx dest = SET_DEST (v1);
                      while (GET_CODE (dest) == SUBREG
                             || GET_CODE (dest) == ZERO_EXTRACT
                             || GET_CODE (dest) == STRICT_LOW_PART)
                        dest = XEXP (dest, 0);
                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs;
                          if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                            xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                          else
                            xnregs = 1;
                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          if (xregno < valueno + valuenregs
                              && xregno + xnregs > valueno)
                            return 0;
                          if (goal_mem_addr_varies
                              && reg_overlap_mentioned_for_reload_p (dest,
                                                                     goal))
                            return 0;
                          if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                            return 0;
                        }
                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                               && reg_equiv_memory_loc (regno) != 0)
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

          if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
            {
              rtx link;

              for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
                   link = XEXP (link, 1))
                {
                  pat = XEXP (link, 0);
                  if (GET_CODE (pat) == CLOBBER)
                    {
                      rtx dest = SET_DEST (pat);

                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs
                            = hard_regno_nregs[xregno][GET_MODE (dest)];

                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          else if (xregno < valueno + valuenregs
                                   && xregno + xnregs > valueno)
                            return 0;
                          else if (goal_mem_addr_varies
                                   && reg_overlap_mentioned_for_reload_p (dest,
                                                                          goal))
                            return 0;
                        }

                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

#ifdef AUTO_INC_DEC
          /* If this insn auto-increments or auto-decrements
             either regno or valueno, return 0 now.
             If GOAL is a memory ref and its address is not constant,
             and this insn P increments a register used in GOAL, return 0.  */
          {
            rtx link;

            for (link = REG_NOTES (p); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_INC
                  && REG_P (XEXP (link, 0)))
                {
                  int incno = REGNO (XEXP (link, 0));
                  if (incno < regno + nregs && incno >= regno)
                    return 0;
                  if (incno < valueno + valuenregs && incno >= valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
                                                             goal))
                    return 0;
                }
          }
#endif
        }
    }
}
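
#if 0
/* Illustrative sketch only, not part of the original sources: a typical
   caller might ask whether some earlier insn already left the value of
   reload J's input in a register of the right class, and reuse that
   register instead of emitting a fresh load.  The names `insn' and `j'
   below, and the decision to store the result into rld[j].reg_rtx, are
   assumptions of this sketch, not code taken from reload1.c.  */
static void
example_try_reuse_equiv (rtx insn, int j)
{
  rtx equiv = find_equiv_reg (rld[j].in, insn,
                              (enum reg_class) rld[j].rclass,
                              -1,    /* OTHER: any register in the class.  */
                              NULL,  /* RELOAD_REG_P: no spill-reg limits.  */
                              0,     /* GOALREG: unused, GOAL is nonzero.  */
                              rld[j].inmode);
  if (equiv != 0)
    /* An earlier insn already holds the value; reuse that register.  */
    rld[j].reg_rtx = equiv;
}
#endif
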
/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

static int
find_inc_amount (rtx x, rtx inced)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i;

  if (code == MEM)
    {
      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
           || GET_CODE (addr) == POST_DEC
           || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_INC)
          && XEXP (addr, 0) == inced)
        return GET_MODE_SIZE (GET_MODE (x));
      else if ((GET_CODE (addr) == PRE_MODIFY
                || GET_CODE (addr) == POST_MODIFY)
               && GET_CODE (XEXP (addr, 1)) == PLUS
               && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
               && XEXP (addr, 0) == inced
               && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
        {
          i = INTVAL (XEXP (XEXP (addr, 1), 1));
          return i < 0 ? -i : i;
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          int tem = find_inc_amount (XEXP (x, i), inced);
          if (tem != 0)
            return tem;
        }
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              int tem = find_inc_amount (XVECEXP (x, i, j), inced);
              if (tem != 0)
                return tem;
            }
        }
    }

  return 0;
}
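
/* Illustrative note (not from the original sources): if INCED is (reg:SI 1),
   then for

       (mem:SI (post_inc:SI (reg:SI 1)))

   the function returns GET_MODE_SIZE (SImode), i.e. 4 on a typical 32-bit
   target, while for a {pre,post}_modify address whose adjustment is
   (plus (reg:SI 1) (const_int -8)) it returns the absolute value 8.  */
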
/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

#ifdef AUTO_INC_DEC
static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
                           rtx insn)
{
  rtx link;

  gcc_assert (insn);

  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        unsigned int test = (int) REGNO (XEXP (link, 0));
        if (test >= regno && test < endregno)
          return 1;
      }
  return 0;
}

#else

#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

#endif

/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
                   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

      for (; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          if ((GET_CODE (elt) == CLOBBER
               || (sets == 1 && GET_CODE (elt) == SET))
              && REG_P (XEXP (elt, 0)))
            {
              unsigned int test = REGNO (XEXP (elt, 0));

              if (test >= regno && test < endregno)
                return 1;
            }
          if (sets == 2
              && reg_inc_found_and_valid_p (regno, endregno, elt))
            return 1;
        }
    }

  return 0;
}
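
/* Illustrative note (not from the original sources): given a hard register
   number HARDREG and an insn whose pattern is

       (parallel [(set (reg:SI 0) (reg:SI 1))
                  (clobber (reg:SI HARDREG))])

   the call regno_clobbered_p (HARDREG, insn, SImode, 0) returns 1.  Passing
   SETS == 1 additionally treats plain SETs of the register as clobbers, and
   SETS == 2 also honors REG_INC notes on the insn.  */
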
/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
rtx
reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
{
  int regno;

  if (GET_MODE (reloadreg) == mode)
    return reloadreg;

  regno = REGNO (reloadreg);

  if (REG_WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
             - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
}
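
/* Illustrative note (not from the original sources): if RELOADREG is
   (reg:DI 10) on a target where REG_WORDS_BIG_ENDIAN holds, DImode needs
   two registers and SImode one, then asking for the SImode low part yields
   (reg:SI 11): the regno is advanced by 2 - 1 = 1 so that the result names
   the register holding the low-order word.  */
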
static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};

/* These functions are used to print the variables set by 'find_reloads'.  */

DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
        {
          fprintf (f, "reload_in (%s) = ",
                   GET_MODE_NAME (rld[r].inmode));
          print_inline_rtx (f, rld[r].in, 24);
          fprintf (f, "\n\t");
        }

      if (rld[r].out != 0)
        {
          fprintf (f, "reload_out (%s) = ",
                   GET_MODE_NAME (rld[r].outmode));
          print_inline_rtx (f, rld[r].out, 24);
          fprintf (f, "\n\t");
        }

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
               reload_when_needed_name[(int) rld[r].when_needed],
               rld[r].opnum);

      if (rld[r].optional)
        fprintf (f, ", optional");

      if (rld[r].nongroup)
        fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
        fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
        fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
        fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
        {
          fprintf (f, "\n\treload_in_reg: ");
          print_inline_rtx (f, rld[r].in_reg, 24);
        }

      if (rld[r].out_reg != 0)
        {
          fprintf (f, "\n\treload_out_reg: ");
          print_inline_rtx (f, rld[r].out_reg, 24);
        }

      if (rld[r].reg_rtx != 0)
        {
          fprintf (f, "\n\treload_reg_rtx: ");
          print_inline_rtx (f, rld[r].reg_rtx, 24);
        }

      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
        {
          fprintf (f, "%ssecondary_in_reload = %d",
                   prefix, rld[r].secondary_in_reload);
          prefix = ", ";
        }

      if (rld[r].secondary_out_reload != -1)
        fprintf (f, "%ssecondary_out_reload = %d\n",
                 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
        {
          fprintf (f, "%ssecondary_in_icode = %s", prefix,
                   insn_data[rld[r].secondary_in_icode].name);
          prefix = ", ";
        }

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
        fprintf (f, "%ssecondary_out_icode = %s", prefix,
                 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}

DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);
}
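
/* Illustrative note (not from the original sources): these entry points are
   intended for interactive use while debugging the compiler itself, e.g.
   from within gdb after find_reloads has run:

       (gdb) call debug_reload ()

   which dumps every element of rld[0 .. n_reloads-1] to stderr.  */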