gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
56 NOTE SIDE EFFECTS:
58 find_reloads can alter the operands of the instruction it is called on.
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
72 Using a reload register for several reloads in one insn:
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
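/* Illustrative sketch only (not part of the original sources): the
   caller-side protocol described above, using the rld[] array that now
   holds what the comment calls `reload_reg_rtx', looks roughly like
   this; the elided parts stand in for the allocation logic that
   actually lives in reload1.c.

       init_reload ();
       ...
       find_reloads (insn, 1, ind_levels, live_known, reload_reg_p);
       for (int i = 0; i < n_reloads; i++)
         if (rld[i].reg_rtx == 0)
           rld[i].reg_rtx = ...;   choose a hard reg of class rld[i].rclass
       ... emit load insns before INSN and store insns after it ...
       subst_reloads (insn);  */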
86 #define REG_OK_STRICT
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "tm_p.h"
100 #include "optabs.h"
101 #include "regs.h"
102 #include "ira.h"
103 #include "recog.h"
104 #include "rtl-error.h"
105 #include "reload.h"
106 #include "addresses.h"
107 #include "params.h"
109 /* True if X is a constant that can be forced into the constant pool.
110 MODE is the mode of the operand, or VOIDmode if not known. */
111 #define CONST_POOL_OK_P(MODE, X) \
112 ((MODE) != VOIDmode \
113 && CONSTANT_P (X) \
114 && GET_CODE (X) != HIGH \
115 && !targetm.cannot_force_const_mem (MODE, X))
117 /* True if RCLASS is a non-empty register class that has too few registers
118 to be safely used as a reload target class. */
120 static inline bool
121 small_register_class_p (reg_class_t rclass)
123 return (reg_class_size [(int) rclass] == 1
124 || (reg_class_size [(int) rclass] >= 1
125 && targetm.class_likely_spilled_p (rclass)));
129 /* All reloads of the current insn are recorded here. See reload.h for
130 comments. */
131 int n_reloads;
132 struct reload rld[MAX_RELOADS];
134 /* All the "earlyclobber" operands of the current insn
135 are recorded here. */
136 int n_earlyclobbers;
137 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 int reload_n_operands;
141 /* Replacing reloads.
143 If `replace_reloads' is nonzero, then as each reload is recorded
144 an entry is made for it in the table `replacements'.
145 Then later `subst_reloads' can look through that table and
146 perform all the replacements needed. */
148 /* Nonzero means record the places to replace. */
149 static int replace_reloads;
151 /* Each replacement is recorded with a structure like this. */
152 struct replacement
154 rtx *where; /* Location to store in */
155 int what; /* which reload this is for */
156 machine_mode mode; /* mode it must have */
159 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161 /* Number of replacements currently recorded. */
162 static int n_replacements;
164 /* Used to track what is modified by an operand. */
165 struct decomposition
167 int reg_flag; /* Nonzero if referencing a register. */
168 int safe; /* Nonzero if this can't conflict with anything. */
169 rtx base; /* Base address for MEM. */
170 HOST_WIDE_INT start; /* Starting offset or register number. */
171 HOST_WIDE_INT end; /* Ending offset or register number. */
174 #ifdef SECONDARY_MEMORY_NEEDED
176 /* Save MEMs needed to copy from one class of registers to another. One MEM
177 is used per mode, but normally only one or two modes are ever used.
179 We keep two versions, before and after register elimination. The one
180 after register elimination is recorded separately for each operand. This
181 is done in case the address is not valid, to be sure that we reload
182 each one separately. */
184 static rtx secondary_memlocs[NUM_MACHINE_MODES];
185 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
186 static int secondary_memlocs_elim_used = 0;
187 #endif
189 /* The instruction we are doing reloads for;
190 so we can test whether a register dies in it. */
191 static rtx_insn *this_insn;
193 /* Nonzero if this instruction is a user-specified asm with operands. */
194 static int this_insn_is_asm;
196 /* If hard_regs_live_known is nonzero,
197 we can tell which hard regs are currently live,
198 at least enough to succeed in choosing dummy reloads. */
199 static int hard_regs_live_known;
201 /* Indexed by hard reg number,
202 element is nonnegative if hard reg has been spilled.
203 This vector is passed to `find_reloads' as an argument
204 and is not changed here. */
205 static short *static_reload_reg_p;
207 /* Set to 1 in subst_reg_equivs if it changes anything. */
208 static int subst_reg_equivs_changed;
210 /* On return from push_reload, holds the reload-number for the OUT
211 operand, which can be different from the one for the input operand. */
212 static int output_reloadnum;
214 /* Compare two RTX's: they match if they are the same rtx, refer to the same register number, or are equal expressions with no side effects. */
215 #define MATCHES(x, y) \
216 (x == y || (x != 0 && (REG_P (x) \
217 ? REG_P (y) && REGNO (x) == REGNO (y) \
218 : rtx_equal_p (x, y) && ! side_effects_p (x))))
220 /* Nonzero if the purposes of two reloads are similar enough that we
221 can merge the reloads. */
222 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
223 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
224 || ((when1) == (when2) && (op1) == (op2)) \
225 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
226 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
227 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
228 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
229 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
231 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
232 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
233 ((when1) != (when2) \
234 || ! ((op1) == (op2) \
235 || (when1) == RELOAD_FOR_INPUT \
236 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
237 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
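/* Worked examples (illustrative only, not taken from the original
   comments) of how the two macros above interact:

       MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)   is true
       MERGE_TO_OTHER   (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)   is false
         so input reloads for different operands merge and stay
         RELOAD_FOR_INPUT;

       MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS,
                         RELOAD_FOR_INPUT_ADDRESS, 0, 1)             is false
         so address reloads for different operands stay separate;

       MERGABLE_RELOADS (RELOAD_OTHER, RELOAD_FOR_OUTPUT, 1, 1)      is true
       MERGE_TO_OTHER   (RELOAD_OTHER, RELOAD_FOR_OUTPUT, 1, 1)      is true
         so the merged reload is classified RELOAD_OTHER.  */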
239 /* If we are going to reload an address, compute the reload type to
240 use. */
241 #define ADDR_TYPE(type) \
242 ((type) == RELOAD_FOR_INPUT_ADDRESS \
243 ? RELOAD_FOR_INPADDR_ADDRESS \
244 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
245 ? RELOAD_FOR_OUTADDR_ADDRESS \
246 : (type)))
248 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
249 machine_mode, enum reload_type,
250 enum insn_code *, secondary_reload_info *);
251 static enum reg_class find_valid_class (machine_mode, machine_mode,
252 int, unsigned int);
253 static void push_replacement (rtx *, int, machine_mode);
254 static void dup_replacements (rtx *, rtx *);
255 static void combine_reloads (void);
256 static int find_reusable_reload (rtx *, rtx, enum reg_class,
257 enum reload_type, int, int);
258 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
259 machine_mode, reg_class_t, int, int);
260 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
261 static struct decomposition decompose (rtx);
262 static int immune_p (rtx, rtx, struct decomposition);
263 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
264 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
265 rtx_insn *, int *);
266 static rtx make_memloc (rtx, int);
267 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
268 addr_space_t, rtx *);
269 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
270 int, enum reload_type, int, rtx_insn *);
271 static rtx subst_reg_equivs (rtx, rtx_insn *);
272 static rtx subst_indexed_address (rtx);
273 static void update_auto_inc_notes (rtx_insn *, int, int);
274 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
275 enum rtx_code, enum rtx_code, rtx *,
276 int, enum reload_type,int, rtx_insn *);
277 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
278 machine_mode, int,
279 enum reload_type, int);
280 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
281 int, rtx_insn *, int *);
282 static void copy_replacements_1 (rtx *, rtx *, int);
283 static int find_inc_amount (rtx, rtx);
284 static int refers_to_mem_for_reload_p (rtx);
285 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
286 rtx, rtx *);
288 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it is not already present
289 in the list. */
291 static void
292 push_reg_equiv_alt_mem (int regno, rtx mem)
294 rtx it;
296 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
297 if (rtx_equal_p (XEXP (it, 0), mem))
298 return;
300 reg_equiv_alt_mem_list (regno)
301 = alloc_EXPR_LIST (REG_EQUIV, mem,
302 reg_equiv_alt_mem_list (regno));
305 /* Determine if any secondary reloads are needed for loading (if IN_P is
306 nonzero) or storing (if IN_P is zero) X to or from a reload register of
307 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
308 are needed, push them.
310 Return the reload number of the secondary reload we made, or -1 if
311 we didn't need one. *PICODE is set to the insn_code to use if we do
312 need a secondary reload. */
314 static int
315 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
316 enum reg_class reload_class,
317 machine_mode reload_mode, enum reload_type type,
318 enum insn_code *picode, secondary_reload_info *prev_sri)
320 enum reg_class rclass = NO_REGS;
321 enum reg_class scratch_class;
322 machine_mode mode = reload_mode;
323 enum insn_code icode = CODE_FOR_nothing;
324 enum insn_code t_icode = CODE_FOR_nothing;
325 enum reload_type secondary_type;
326 int s_reload, t_reload = -1;
327 const char *scratch_constraint;
328 secondary_reload_info sri;
330 if (type == RELOAD_FOR_INPUT_ADDRESS
331 || type == RELOAD_FOR_OUTPUT_ADDRESS
332 || type == RELOAD_FOR_INPADDR_ADDRESS
333 || type == RELOAD_FOR_OUTADDR_ADDRESS)
334 secondary_type = type;
335 else
336 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
338 *picode = CODE_FOR_nothing;
340 /* If X is a paradoxical SUBREG, use the inner value to determine both the
341 mode and object being reloaded. */
342 if (paradoxical_subreg_p (x))
344 x = SUBREG_REG (x);
345 reload_mode = GET_MODE (x);
348 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
349 is still a pseudo-register by now, it *must* have an equivalent MEM
350 but we don't want to assume that), use that equivalent when seeing if
351 a secondary reload is needed since whether or not a reload is needed
352 might be sensitive to the form of the MEM. */
354 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
355 && reg_equiv_mem (REGNO (x)))
356 x = reg_equiv_mem (REGNO (x));
358 sri.icode = CODE_FOR_nothing;
359 sri.prev_sri = prev_sri;
360 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
361 reload_mode, &sri);
362 icode = (enum insn_code) sri.icode;
364 /* If we don't need any secondary registers, done. */
365 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
366 return -1;
368 if (rclass != NO_REGS)
369 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
370 reload_mode, type, &t_icode, &sri);
372 /* If we will be using an insn, the secondary reload is for a
373 scratch register. */
375 if (icode != CODE_FOR_nothing)
377 /* If IN_P is nonzero, the reload register will be the output in
378 operand 0. If IN_P is zero, the reload register will be the input
379 in operand 1. Outputs should have an initial "=", which we must
380 skip. */
382 /* ??? It would be useful to be able to handle only two, or more than
383 three, operands, but for now we can only handle the case of having
384 exactly three: output, input and one temp/scratch. */
385 gcc_assert (insn_data[(int) icode].n_operands == 3);
387 /* ??? We currently have no way to represent a reload that needs
388 an icode to reload from an intermediate tertiary reload register.
389 We should probably have a new field in struct reload to tag a
390 chain of scratch operand reloads onto. */
391 gcc_assert (rclass == NO_REGS);
393 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
394 gcc_assert (*scratch_constraint == '=');
395 scratch_constraint++;
396 if (*scratch_constraint == '&')
397 scratch_constraint++;
398 scratch_class = (reg_class_for_constraint
399 (lookup_constraint (scratch_constraint)));
401 rclass = scratch_class;
402 mode = insn_data[(int) icode].operand[2].mode;
405 /* This case isn't valid, so fail. Reload is allowed to use the same
406 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
407 in the case of a secondary register, we actually need two different
408 registers for correct code. We fail here to prevent the possibility of
409 silently generating incorrect code later.
411 The convention is that secondary input reloads are valid only if the
412 secondary_class is different from class. If you have such a case, you
413 cannot use secondary reloads; you must work around the problem some
414 other way.
416 Allow this when a reload_in/out pattern is being used. I.e. assume
417 that the generated code handles this case. */
419 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
420 || t_icode != CODE_FOR_nothing);
422 /* See if we can reuse an existing secondary reload. */
423 for (s_reload = 0; s_reload < n_reloads; s_reload++)
424 if (rld[s_reload].secondary_p
425 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
426 || reg_class_subset_p (rld[s_reload].rclass, rclass))
427 && ((in_p && rld[s_reload].inmode == mode)
428 || (! in_p && rld[s_reload].outmode == mode))
429 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
430 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
431 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
432 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
433 && (small_register_class_p (rclass)
434 || targetm.small_register_classes_for_mode_p (VOIDmode))
435 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
436 opnum, rld[s_reload].opnum))
438 if (in_p)
439 rld[s_reload].inmode = mode;
440 if (! in_p)
441 rld[s_reload].outmode = mode;
443 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
444 rld[s_reload].rclass = rclass;
446 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
447 rld[s_reload].optional &= optional;
448 rld[s_reload].secondary_p = 1;
449 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
450 opnum, rld[s_reload].opnum))
451 rld[s_reload].when_needed = RELOAD_OTHER;
453 break;
456 if (s_reload == n_reloads)
458 #ifdef SECONDARY_MEMORY_NEEDED
459 /* If we need a memory location to copy between the two reload regs,
460 set it up now. Note that we do the input case before making
461 the reload and the output case after. This is due to the
462 way reloads are output. */
464 if (in_p && icode == CODE_FOR_nothing
465 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
467 get_secondary_mem (x, reload_mode, opnum, type);
469 /* We may have just added new reloads. Make sure we add
470 the new reload at the end. */
471 s_reload = n_reloads;
473 #endif
475 /* We need to make a new secondary reload for this register class. */
476 rld[s_reload].in = rld[s_reload].out = 0;
477 rld[s_reload].rclass = rclass;
479 rld[s_reload].inmode = in_p ? mode : VOIDmode;
480 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
481 rld[s_reload].reg_rtx = 0;
482 rld[s_reload].optional = optional;
483 rld[s_reload].inc = 0;
484 /* Maybe we could combine these, but it seems too tricky. */
485 rld[s_reload].nocombine = 1;
486 rld[s_reload].in_reg = 0;
487 rld[s_reload].out_reg = 0;
488 rld[s_reload].opnum = opnum;
489 rld[s_reload].when_needed = secondary_type;
490 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
491 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
492 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
493 rld[s_reload].secondary_out_icode
494 = ! in_p ? t_icode : CODE_FOR_nothing;
495 rld[s_reload].secondary_p = 1;
497 n_reloads++;
499 #ifdef SECONDARY_MEMORY_NEEDED
500 if (! in_p && icode == CODE_FOR_nothing
501 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
502 get_secondary_mem (x, mode, opnum, type);
503 #endif
506 *picode = icode;
507 return s_reload;
510 /* If a secondary reload is needed, return its class. If both an intermediate
511 register and a scratch register are needed, we return the class of the
512 intermediate register. */
513 reg_class_t
514 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
515 rtx x)
517 enum insn_code icode;
518 secondary_reload_info sri;
520 sri.icode = CODE_FOR_nothing;
521 sri.prev_sri = NULL;
522 rclass
523 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
524 icode = (enum insn_code) sri.icode;
526 /* If there are no secondary reloads at all, we return NO_REGS.
527 If an intermediate register is needed, we return its class. */
528 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
529 return rclass;
531 /* No intermediate register is needed, but we have a special reload
532 pattern, which we assume for now needs a scratch register. */
533 return scratch_reload_class (icode);
536 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
537 three operands, verify that operand 2 is an output operand, and return
538 its register class.
539 ??? We'd like to be able to handle any pattern with at least 2 operands,
540 for zero or more scratch registers, but that needs more infrastructure. */
541 enum reg_class
542 scratch_reload_class (enum insn_code icode)
544 const char *scratch_constraint;
545 enum reg_class rclass;
547 gcc_assert (insn_data[(int) icode].n_operands == 3);
548 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
549 gcc_assert (*scratch_constraint == '=');
550 scratch_constraint++;
551 if (*scratch_constraint == '&')
552 scratch_constraint++;
553 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
554 gcc_assert (rclass != NO_REGS);
555 return rclass;
558 #ifdef SECONDARY_MEMORY_NEEDED
560 /* Return a memory location that will be used to copy X in mode MODE.
561 If we haven't already made a location for this mode in this insn,
562 call find_reloads_address on the location being returned. */
565 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
566 int opnum, enum reload_type type)
568 rtx loc;
569 int mem_valid;
571 /* By default, if MODE is narrower than a word, widen it to a word.
572 This is required because most machines that require these memory
573 locations do not support short loads and stores from all registers
574 (e.g., FP registers). */
576 #ifdef SECONDARY_MEMORY_NEEDED_MODE
577 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
578 #else
579 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
580 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
581 #endif
583 /* If we already have made a MEM for this operand in MODE, return it. */
584 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
585 return secondary_memlocs_elim[(int) mode][opnum];
587 /* If this is the first time we've tried to get a MEM for this mode,
588 allocate a new one. `something_changed' in reload will get set
589 by noticing that the frame size has changed. */
591 if (secondary_memlocs[(int) mode] == 0)
593 #ifdef SECONDARY_MEMORY_NEEDED_RTX
594 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
595 #else
596 secondary_memlocs[(int) mode]
597 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
598 #endif
601 /* Get a version of the address doing any eliminations needed. If that
602 didn't give us a new MEM, make a new one if it isn't valid. */
604 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
605 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
606 MEM_ADDR_SPACE (loc));
608 if (! mem_valid && loc == secondary_memlocs[(int) mode])
609 loc = copy_rtx (loc);
611 /* The only time the call below will do anything is if the stack
612 offset is too large. In that case IND_LEVELS doesn't matter, so we
613 can just pass a zero. Adjust the type to be the address of the
614 corresponding object. If the address was valid, save the eliminated
615 address. If it wasn't valid, we need to make a reload each time, so
616 don't save it. */
618 if (! mem_valid)
620 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
621 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
622 : RELOAD_OTHER);
624 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
625 opnum, type, 0, 0);
628 secondary_memlocs_elim[(int) mode][opnum] = loc;
629 if (secondary_memlocs_elim_used <= (int)mode)
630 secondary_memlocs_elim_used = (int)mode + 1;
631 return loc;
634 /* Clear any secondary memory locations we've made. */
636 void
637 clear_secondary_mem (void)
639 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
641 #endif /* SECONDARY_MEMORY_NEEDED */
644 /* Find the largest class which has at least one register valid in
645 mode INNER, and which for every such register, that register number
646 plus N is also valid in OUTER (if in range) and is cheap to move
647 into DEST_REGNO. Such a class must exist. */
649 static enum reg_class
650 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
651 machine_mode inner ATTRIBUTE_UNUSED, int n,
652 unsigned int dest_regno ATTRIBUTE_UNUSED)
654 int best_cost = -1;
655 int rclass;
656 int regno;
657 enum reg_class best_class = NO_REGS;
658 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
659 unsigned int best_size = 0;
660 int cost;
662 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
664 int bad = 0;
665 int good = 0;
666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
667 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
669 if (HARD_REGNO_MODE_OK (regno, inner))
671 good = 1;
672 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
673 && ! HARD_REGNO_MODE_OK (regno + n, outer))
674 bad = 1;
678 if (bad || !good)
679 continue;
680 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
682 if ((reg_class_size[rclass] > best_size
683 && (best_cost < 0 || best_cost >= cost))
684 || best_cost > cost)
686 best_class = (enum reg_class) rclass;
687 best_size = reg_class_size[rclass];
688 best_cost = register_move_cost (outer, (enum reg_class) rclass,
689 dest_class);
693 gcc_assert (best_size != 0);
695 return best_class;
698 /* We are trying to reload a subreg of something that is not a register.
699 Find the largest class which contains only registers valid in
700 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
701 which we would eventually like to obtain the object. */
703 static enum reg_class
704 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
705 machine_mode mode ATTRIBUTE_UNUSED,
706 enum reg_class dest_class ATTRIBUTE_UNUSED)
708 int best_cost = -1;
709 int rclass;
710 int regno;
711 enum reg_class best_class = NO_REGS;
712 unsigned int best_size = 0;
713 int cost;
715 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
717 int bad = 0;
718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
720 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
721 && !HARD_REGNO_MODE_OK (regno, mode))
722 bad = 1;
725 if (bad)
726 continue;
728 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
730 if ((reg_class_size[rclass] > best_size
731 && (best_cost < 0 || best_cost >= cost))
732 || best_cost > cost)
734 best_class = (enum reg_class) rclass;
735 best_size = reg_class_size[rclass];
736 best_cost = register_move_cost (outer, (enum reg_class) rclass,
737 dest_class);
741 gcc_assert (best_size != 0);
743 #ifdef LIMIT_RELOAD_CLASS
744 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
745 #endif
746 return best_class;
749 /* Return the number of a previously made reload that can be combined with
750 a new one, or n_reloads if none of the existing reloads can be used.
751 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
752 push_reload; they determine the kind of the new reload that we try to
753 combine. P_IN points to the corresponding value of IN, which can be
754 modified by this function.
755 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
757 static int
758 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
759 enum reload_type type, int opnum, int dont_share)
761 rtx in = *p_in;
762 int i;
763 /* We can't merge two reloads if the output of either one is
764 earlyclobbered. */
766 if (earlyclobber_operand_p (out))
767 return n_reloads;
769 /* We can use an existing reload if the class is right
770 and at least one of IN and OUT is a match
771 and the other is at worst neutral.
772 (A zero compared against anything is neutral.)
774 For targets with small register classes, don't use existing reloads
775 unless they are for the same thing since that can cause us to need
776 more reload registers than we otherwise would. */
778 for (i = 0; i < n_reloads; i++)
779 if ((reg_class_subset_p (rclass, rld[i].rclass)
780 || reg_class_subset_p (rld[i].rclass, rclass))
781 /* If the existing reload has a register, it must fit our class. */
782 && (rld[i].reg_rtx == 0
783 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
784 true_regnum (rld[i].reg_rtx)))
785 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
786 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
787 || (out != 0 && MATCHES (rld[i].out, out)
788 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
789 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
790 && (small_register_class_p (rclass)
791 || targetm.small_register_classes_for_mode_p (VOIDmode))
792 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
793 return i;
795 /* Reloading a plain reg for input can match a reload to postincrement
796 that reg, since the postincrement's value is the right value.
797 Likewise, it can match a preincrement reload, since we regard
798 the preincrementation as happening before any ref in this insn
799 to that register. */
800 for (i = 0; i < n_reloads; i++)
801 if ((reg_class_subset_p (rclass, rld[i].rclass)
802 || reg_class_subset_p (rld[i].rclass, rclass))
803 /* If the existing reload has a register, it must fit our
804 class. */
805 && (rld[i].reg_rtx == 0
806 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
807 true_regnum (rld[i].reg_rtx)))
808 && out == 0 && rld[i].out == 0 && rld[i].in != 0
809 && ((REG_P (in)
810 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
811 && MATCHES (XEXP (rld[i].in, 0), in))
812 || (REG_P (rld[i].in)
813 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
814 && MATCHES (XEXP (in, 0), rld[i].in)))
815 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
816 && (small_register_class_p (rclass)
817 || targetm.small_register_classes_for_mode_p (VOIDmode))
818 && MERGABLE_RELOADS (type, rld[i].when_needed,
819 opnum, rld[i].opnum))
821 /* Make sure reload_in ultimately has the increment,
822 not the plain register. */
823 if (REG_P (in))
824 *p_in = rld[i].in;
825 return i;
827 return n_reloads;
830 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
831 expression. MODE is the mode that X will be used in. OUTPUT is true if
832 the function is invoked for the output part of an enclosing reload. */
834 static bool
835 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
837 rtx inner;
839 /* Only SUBREGs are problematical. */
840 if (GET_CODE (x) != SUBREG)
841 return false;
843 inner = SUBREG_REG (x);
845 /* If INNER is a constant or PLUS, then INNER will need reloading. */
846 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
847 return true;
849 /* If INNER is not a hard register, then INNER will not need reloading. */
850 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
851 return false;
853 /* If INNER is not ok for MODE, then INNER will need reloading. */
854 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
855 return true;
857 /* If this is for an output, and the outer part is a word or smaller,
858 INNER is larger than a word and the number of registers in INNER is
859 not the same as the number of words in INNER, then INNER will need
860 reloading (with an in-out reload). */
861 return (output
862 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
863 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
864 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
865 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
868 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
869 requiring an extra reload register. The caller has already found that
870 IN contains some reference to REGNO, so check that we can produce the
871 new value in a single step. E.g. if we have
872 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
873 instruction that adds one to a register, this should succeed.
874 However, if we have something like
875 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
876 needs to be loaded into a register first, we need a separate reload
877 register.
878 Such PLUS reloads are generated by find_reloads_address_part.
879 The out-of-range PLUS expressions are usually introduced in the instruction
880 patterns by register elimination and substituting pseudos without a home
881 by their function-invariant equivalences. */
882 static int
883 can_reload_into (rtx in, int regno, machine_mode mode)
885 rtx dst;
886 rtx_insn *test_insn;
887 int r = 0;
888 struct recog_data_d save_recog_data;
890 /* For matching constraints, we often get notional input reloads where
891 we want to use the original register as the reload register. I.e.
892 technically this is a non-optional input-output reload, but IN is
893 already a valid register, and has been chosen as the reload register.
894 Speed this up, since it trivially works. */
895 if (REG_P (in))
896 return 1;
898 /* To test MEMs properly, we'd have to take into account all the reloads
899 that are already scheduled, which can become quite complicated.
900 And since we've already handled address reloads for this MEM, it
901 should always succeed anyway. */
902 if (MEM_P (in))
903 return 1;
905 /* If we can make a simple SET insn that does the job, everything should
906 be fine. */
907 dst = gen_rtx_REG (mode, regno);
908 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
909 save_recog_data = recog_data;
910 if (recog_memoized (test_insn) >= 0)
912 extract_insn (test_insn);
913 r = constrain_operands (1, get_enabled_alternatives (test_insn));
915 recog_data = save_recog_data;
916 return r;
919 /* Record one reload that needs to be performed.
920 IN is an rtx saying where the data are to be found before this instruction.
921 OUT says where they must be stored after the instruction.
922 (IN is zero for data not read, and OUT is zero for data not written.)
923 INLOC and OUTLOC point to the places in the instructions where
924 IN and OUT were found.
925 If IN and OUT are both nonzero, it means the same register must be used
926 to reload both IN and OUT.
928 RCLASS is a register class required for the reloaded data.
929 INMODE is the machine mode that the instruction requires
930 for the reg that replaces IN and OUTMODE is likewise for OUT.
932 If IN is zero, then OUT's location and mode should be passed as
933 INLOC and INMODE.
935 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
937 OPTIONAL nonzero means this reload does not need to be performed:
938 it can be discarded if that is more convenient.
940 OPNUM and TYPE say what the purpose of this reload is.
942 The return value is the reload-number for this reload.
944 If both IN and OUT are nonzero, in some rare cases we might
945 want to make two separate reloads. (Actually we never do this now.)
946 Therefore, the reload-number for OUT is stored in
947 output_reloadnum when we return; the return value applies to IN.
948 Usually (presently always), when IN and OUT are nonzero,
949 the two reload-numbers are equal, but the caller should be careful to
950 distinguish them. */
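/* A hypothetical call for illustration (the operand number, register
   class and reload type are made up here; find_reloads derives the
   real ones from the constraints): pushing a non-optional input
   reload of operand 0 into GENERAL_REGS would look like

       push_reload (recog_data.operand[0], NULL_RTX,
                    recog_data.operand_loc[0], (rtx *) 0,
                    GENERAL_REGS, GET_MODE (recog_data.operand[0]),
                    VOIDmode, 0, 0, 0, RELOAD_FOR_INPUT);

   and the return value indexes the new (or reused) entry in rld[].  */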
953 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
954 enum reg_class rclass, machine_mode inmode,
955 machine_mode outmode, int strict_low, int optional,
956 int opnum, enum reload_type type)
958 int i;
959 int dont_share = 0;
960 int dont_remove_subreg = 0;
961 #ifdef LIMIT_RELOAD_CLASS
962 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
963 #endif
964 int secondary_in_reload = -1, secondary_out_reload = -1;
965 enum insn_code secondary_in_icode = CODE_FOR_nothing;
966 enum insn_code secondary_out_icode = CODE_FOR_nothing;
967 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
968 subreg_in_class = NO_REGS;
970 /* INMODE and/or OUTMODE could be VOIDmode if no mode
971 has been specified for the operand. In that case,
972 use the operand's mode as the mode to reload. */
973 if (inmode == VOIDmode && in != 0)
974 inmode = GET_MODE (in);
975 if (outmode == VOIDmode && out != 0)
976 outmode = GET_MODE (out);
978 /* If find_reloads and friends have so far failed to replace a pseudo
979 with its reg_equiv_constant, something went wrong
980 beforehand.
981 Note that the replacement can't simply be done here if we missed it
982 earlier, since the constant might need to be pushed into the literal
983 pool and the resulting memref would probably need further
984 reloading. */
985 if (in != 0 && REG_P (in))
987 int regno = REGNO (in);
989 gcc_assert (regno < FIRST_PSEUDO_REGISTER
990 || reg_renumber[regno] >= 0
991 || reg_equiv_constant (regno) == NULL_RTX);
994 /* reg_equiv_constant only contains constants which are obviously
995 not appropriate as destination. So if we would need to replace
996 the destination pseudo with a constant we are in real
997 trouble. */
998 if (out != 0 && REG_P (out))
1000 int regno = REGNO (out);
1002 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1003 || reg_renumber[regno] >= 0
1004 || reg_equiv_constant (regno) == NULL_RTX);
1007 /* If we have a read-write operand with an address side-effect,
1008 change either IN or OUT so the side-effect happens only once. */
1009 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1010 switch (GET_CODE (XEXP (in, 0)))
1012 case POST_INC: case POST_DEC: case POST_MODIFY:
1013 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1014 break;
1016 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1017 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1018 break;
1020 default:
1021 break;
1024 /* If we are reloading a (SUBREG constant ...), really reload just the
1025 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1026 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1027 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1028 register is a pseudo, also reload the inside expression.
1029 For machines that extend byte loads, do this for any SUBREG of a pseudo
1030 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1031 M2 is an integral mode that gets extended when loaded.
1032 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1033 where either M1 is not valid for R or M2 is wider than a word but we
1034 only need one register to store an M2-sized quantity in R.
1035 (However, if OUT is nonzero, we need to reload the reg *and*
1036 the subreg, so do nothing here, and let the following statement handle it.)
1038 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1039 we can't handle it here because CONST_INT does not indicate a mode.
1041 Similarly, we must reload the inside expression if we have a
1042 STRICT_LOW_PART (presumably, in == out in this case).
1044 Also reload the inner expression if it does not require a secondary
1045 reload but the SUBREG does.
1047 Finally, reload the inner expression if it is a register that is in
1048 the class whose registers cannot be referenced in a different size
1049 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1050 cannot reload just the inside since we might end up with the wrong
1051 register class. But if it is inside a STRICT_LOW_PART, we have
1052 no choice, so we hope we do get the right register class there. */
1054 if (in != 0 && GET_CODE (in) == SUBREG
1055 && (subreg_lowpart_p (in) || strict_low)
1056 #ifdef CANNOT_CHANGE_MODE_CLASS
1057 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1058 #endif
1059 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1060 && (CONSTANT_P (SUBREG_REG (in))
1061 || GET_CODE (SUBREG_REG (in)) == PLUS
1062 || strict_low
1063 || (((REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1065 || MEM_P (SUBREG_REG (in)))
1066 && ((GET_MODE_PRECISION (inmode)
1067 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1068 #ifdef LOAD_EXTEND_OP
1069 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1070 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1071 <= UNITS_PER_WORD)
1072 && (GET_MODE_PRECISION (inmode)
1073 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1074 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1075 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1076 #endif
1077 #if WORD_REGISTER_OPERATIONS
1078 || ((GET_MODE_PRECISION (inmode)
1079 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1080 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1081 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1082 / UNITS_PER_WORD)))
1083 #endif
1085 || (REG_P (SUBREG_REG (in))
1086 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1087 /* The case where out is nonzero
1088 is handled differently in the following statement. */
1089 && (out == 0 || subreg_lowpart_p (in))
1090 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1091 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1092 > UNITS_PER_WORD)
1093 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1094 / UNITS_PER_WORD)
1095 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1096 [GET_MODE (SUBREG_REG (in))]))
1097 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1098 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1099 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1100 SUBREG_REG (in))
1101 == NO_REGS))
1102 #ifdef CANNOT_CHANGE_MODE_CLASS
1103 || (REG_P (SUBREG_REG (in))
1104 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1105 && REG_CANNOT_CHANGE_MODE_P
1106 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1107 #endif
1110 #ifdef LIMIT_RELOAD_CLASS
1111 in_subreg_loc = inloc;
1112 #endif
1113 inloc = &SUBREG_REG (in);
1114 in = *inloc;
1115 #if ! defined (LOAD_EXTEND_OP)
1116 if (!WORD_REGISTER_OPERATIONS
1117 && MEM_P (in))
1118 /* This is supposed to happen only for paradoxical subregs made by
1119 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1120 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1121 #endif
1122 inmode = GET_MODE (in);
1125 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1126 where M1 is not valid for R if it was not handled by the code above.
1128 Similar issue for (SUBREG constant ...) if it was not handled by the
1129 code above. This can happen if SUBREG_BYTE != 0.
1131 However, we must reload the inner reg *as well as* the subreg in
1132 that case. */
1134 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1136 if (REG_P (SUBREG_REG (in)))
1137 subreg_in_class
1138 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1139 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1140 GET_MODE (SUBREG_REG (in)),
1141 SUBREG_BYTE (in),
1142 GET_MODE (in)),
1143 REGNO (SUBREG_REG (in)));
1144 else if (CONSTANT_P (SUBREG_REG (in))
1145 || GET_CODE (SUBREG_REG (in)) == PLUS)
1146 subreg_in_class = find_valid_class_1 (inmode,
1147 GET_MODE (SUBREG_REG (in)),
1148 rclass);
1150 /* This relies on the fact that emit_reload_insns outputs the
1151 instructions for input reloads of type RELOAD_OTHER in the same
1152 order as the reloads. Thus if the outer reload is also of type
1153 RELOAD_OTHER, we are guaranteed that this inner reload will be
1154 output before the outer reload. */
1155 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1156 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1157 dont_remove_subreg = 1;
1160 /* Similarly for paradoxical and problematical SUBREGs on the output.
1161 Note that there is no reason we need worry about the previous value
1162 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1163 entitled to clobber it all (except in the case of a word mode subreg
1164 or of a STRICT_LOW_PART, in that latter case the constraint should
1165 label it input-output.) */
1166 if (out != 0 && GET_CODE (out) == SUBREG
1167 && (subreg_lowpart_p (out) || strict_low)
1168 #ifdef CANNOT_CHANGE_MODE_CLASS
1169 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1170 #endif
1171 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1172 && (CONSTANT_P (SUBREG_REG (out))
1173 || strict_low
1174 || (((REG_P (SUBREG_REG (out))
1175 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1176 || MEM_P (SUBREG_REG (out)))
1177 && ((GET_MODE_PRECISION (outmode)
1178 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1179 #if WORD_REGISTER_OPERATIONS
1180 || ((GET_MODE_PRECISION (outmode)
1181 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1182 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1183 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1184 / UNITS_PER_WORD)))
1185 #endif
1187 || (REG_P (SUBREG_REG (out))
1188 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1189 /* The case of a word mode subreg
1190 is handled differently in the following statement. */
1191 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1192 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1193 > UNITS_PER_WORD))
1194 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1195 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1196 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1197 SUBREG_REG (out))
1198 == NO_REGS))
1199 #ifdef CANNOT_CHANGE_MODE_CLASS
1200 || (REG_P (SUBREG_REG (out))
1201 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1202 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1203 GET_MODE (SUBREG_REG (out)),
1204 outmode))
1205 #endif
1208 #ifdef LIMIT_RELOAD_CLASS
1209 out_subreg_loc = outloc;
1210 #endif
1211 outloc = &SUBREG_REG (out);
1212 out = *outloc;
1213 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1214 || GET_MODE_SIZE (GET_MODE (out))
1215 <= GET_MODE_SIZE (outmode));
1216 outmode = GET_MODE (out);
1219 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1220 where either M1 is not valid for R or M2 is wider than a word but we
1221 only need one register to store an M2-sized quantity in R.
1223 However, we must reload the inner reg *as well as* the subreg in
1224 that case and the inner reg is an in-out reload. */
1226 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1228 enum reg_class in_out_class
1229 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1230 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1231 GET_MODE (SUBREG_REG (out)),
1232 SUBREG_BYTE (out),
1233 GET_MODE (out)),
1234 REGNO (SUBREG_REG (out)));
1236 /* This relies on the fact that emit_reload_insns outputs the
1237 instructions for output reloads of type RELOAD_OTHER in reverse
1238 order of the reloads. Thus if the outer reload is also of type
1239 RELOAD_OTHER, we are guaranteed that this inner reload will be
1240 output after the outer reload. */
1241 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1242 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1243 0, 0, opnum, RELOAD_OTHER);
1244 dont_remove_subreg = 1;
1247 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1248 if (in != 0 && out != 0 && MEM_P (out)
1249 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1250 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1251 dont_share = 1;
1253 /* If IN is a SUBREG of a hard register, make a new REG. This
1254 simplifies some of the cases below. */
1256 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1257 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1258 && ! dont_remove_subreg)
1259 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1261 /* Similarly for OUT. */
1262 if (out != 0 && GET_CODE (out) == SUBREG
1263 && REG_P (SUBREG_REG (out))
1264 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1265 && ! dont_remove_subreg)
1266 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1268 /* Narrow down the class of register wanted if that is
1269 desirable on this machine for efficiency. */
1271 reg_class_t preferred_class = rclass;
1273 if (in != 0)
1274 preferred_class = targetm.preferred_reload_class (in, rclass);
1276 /* Output reloads may need analogous treatment, different in detail. */
1277 if (out != 0)
1278 preferred_class
1279 = targetm.preferred_output_reload_class (out, preferred_class);
1281 /* Discard what the target said if we cannot do it. */
1282 if (preferred_class != NO_REGS
1283 || (optional && type == RELOAD_FOR_OUTPUT))
1284 rclass = (enum reg_class) preferred_class;
1287 /* Make sure we use a class that can handle the actual pseudo
1288 inside any subreg. For example, on the 386, QImode regs
1289 can appear within SImode subregs. Although GENERAL_REGS
1290 can handle SImode, QImode needs a smaller class. */
1291 #ifdef LIMIT_RELOAD_CLASS
1292 if (in_subreg_loc)
1293 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1294 else if (in != 0 && GET_CODE (in) == SUBREG)
1295 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1297 if (out_subreg_loc)
1298 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1299 if (out != 0 && GET_CODE (out) == SUBREG)
1300 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1301 #endif
1303 /* Verify that this class is at least possible for the mode that
1304 is specified. */
1305 if (this_insn_is_asm)
1307 machine_mode mode;
1308 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1309 mode = inmode;
1310 else
1311 mode = outmode;
1312 if (mode == VOIDmode)
1314 error_for_asm (this_insn, "cannot reload integer constant "
1315 "operand in %<asm%>");
1316 mode = word_mode;
1317 if (in != 0)
1318 inmode = word_mode;
1319 if (out != 0)
1320 outmode = word_mode;
1322 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1323 if (HARD_REGNO_MODE_OK (i, mode)
1324 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1325 break;
1326 if (i == FIRST_PSEUDO_REGISTER)
1328 error_for_asm (this_insn, "impossible register constraint "
1329 "in %<asm%>");
1330 /* Avoid further trouble with this insn. */
1331 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1332 /* We used to continue here setting class to ALL_REGS, but it triggers
1333 a sanity check on i386 for:
1334 void foo(long double d)
1336 asm("" :: "a" (d));
1338 Returning zero here ought to be safe, as we take care in
1339 find_reloads not to process the reloads when the instruction was
1340 replaced by a USE. */
1342 return 0;
1346 /* Optional output reloads are always OK even if we have no register class,
1347 since the function of these reloads is only to have spill_reg_store etc.
1348 set, so that the storing insn can be deleted later. */
1349 gcc_assert (rclass != NO_REGS
1350 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1352 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1354 if (i == n_reloads)
1356 /* See if we need a secondary reload register to move between CLASS
1357 and IN or CLASS and OUT. Get the icode and push any required reloads
1358 needed for each of them if so. */
1360 if (in != 0)
1361 secondary_in_reload
1362 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1363 &secondary_in_icode, NULL);
1364 if (out != 0 && GET_CODE (out) != SCRATCH)
1365 secondary_out_reload
1366 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1367 type, &secondary_out_icode, NULL);
1369 /* We found no existing reload suitable for re-use.
1370 So add an additional reload. */
1372 #ifdef SECONDARY_MEMORY_NEEDED
1373 if (subreg_in_class == NO_REGS
1374 && in != 0
1375 && (REG_P (in)
1376 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1377 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1378 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1379 /* If a memory location is needed for the copy, make one. */
1380 if (subreg_in_class != NO_REGS
1381 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1382 get_secondary_mem (in, inmode, opnum, type);
1383 #endif
1385 i = n_reloads;
1386 rld[i].in = in;
1387 rld[i].out = out;
1388 rld[i].rclass = rclass;
1389 rld[i].inmode = inmode;
1390 rld[i].outmode = outmode;
1391 rld[i].reg_rtx = 0;
1392 rld[i].optional = optional;
1393 rld[i].inc = 0;
1394 rld[i].nocombine = 0;
1395 rld[i].in_reg = inloc ? *inloc : 0;
1396 rld[i].out_reg = outloc ? *outloc : 0;
1397 rld[i].opnum = opnum;
1398 rld[i].when_needed = type;
1399 rld[i].secondary_in_reload = secondary_in_reload;
1400 rld[i].secondary_out_reload = secondary_out_reload;
1401 rld[i].secondary_in_icode = secondary_in_icode;
1402 rld[i].secondary_out_icode = secondary_out_icode;
1403 rld[i].secondary_p = 0;
1405 n_reloads++;
1407 #ifdef SECONDARY_MEMORY_NEEDED
1408 if (out != 0
1409 && (REG_P (out)
1410 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1411 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1412 && SECONDARY_MEMORY_NEEDED (rclass,
1413 REGNO_REG_CLASS (reg_or_subregno (out)),
1414 outmode))
1415 get_secondary_mem (out, outmode, opnum, type);
1416 #endif
1418 else
1420 /* We are reusing an existing reload,
1421 but we may have additional information for it.
1422 For example, we may now have both IN and OUT
1423 while the old one may have just one of them. */
1425 /* The modes can be different. If they are, we want to reload in
1426 the larger mode, so that the value is valid for both modes. */
1427 if (inmode != VOIDmode
1428 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1429 rld[i].inmode = inmode;
1430 if (outmode != VOIDmode
1431 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1432 rld[i].outmode = outmode;
1433 if (in != 0)
1435 rtx in_reg = inloc ? *inloc : 0;
1436 /* If we merge reloads for two distinct rtl expressions that
1437 are identical in content, there might be duplicate address
1438 reloads. Remove the extra set now, so that if we later find
1439 that we can inherit this reload, we can get rid of the
1440 address reloads altogether.
1442 Do not do this if both reloads are optional since the result
1443 would be an optional reload which could potentially leave
1444 unresolved address replacements.
1446 It is not sufficient to call transfer_replacements since
1447 choose_reload_regs will remove the replacements for address
1448 reloads of inherited reloads which results in the same
1449 problem. */
1450 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1451 && ! (rld[i].optional && optional))
1453 /* We must keep the address reload with the lower operand
1454 number alive. */
1455 if (opnum > rld[i].opnum)
1457 remove_address_replacements (in);
1458 in = rld[i].in;
1459 in_reg = rld[i].in_reg;
1461 else
1462 remove_address_replacements (rld[i].in);
1464 /* When emitting reloads we don't necessarily look at the in-
1465 and outmode, but also directly at the operands (in and out).
1466 So we can't simply overwrite them with whatever we have found
1467 for this (to-be-merged) reload; we have to "merge" that too.
1468 Reusing another reload already verified that we deal with the
1469 same operands, just possibly in different modes. So we
1470 overwrite the operands only when the new mode is larger.
1471 See also PR33613. */
1472 if (!rld[i].in
1473 || GET_MODE_SIZE (GET_MODE (in))
1474 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1475 rld[i].in = in;
1476 if (!rld[i].in_reg
1477 || (in_reg
1478 && GET_MODE_SIZE (GET_MODE (in_reg))
1479 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1480 rld[i].in_reg = in_reg;
1482 if (out != 0)
1484 if (!rld[i].out
1485 || (out
1486 && GET_MODE_SIZE (GET_MODE (out))
1487 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1488 rld[i].out = out;
1489 if (outloc
1490 && (!rld[i].out_reg
1491 || GET_MODE_SIZE (GET_MODE (*outloc))
1492 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1493 rld[i].out_reg = *outloc;
1495 if (reg_class_subset_p (rclass, rld[i].rclass))
1496 rld[i].rclass = rclass;
1497 rld[i].optional &= optional;
1498 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1499 opnum, rld[i].opnum))
1500 rld[i].when_needed = RELOAD_OTHER;
1501 rld[i].opnum = MIN (rld[i].opnum, opnum);
1504 /* If the ostensible rtx being reloaded differs from the rtx found
1505 in the location to substitute, this reload is not safe to combine
1506 because we cannot reliably tell whether it appears in the insn. */
1508 if (in != 0 && in != *inloc)
1509 rld[i].nocombine = 1;
1511 #if 0
1512 /* This was replaced by changes in find_reloads_address_1 and the new
1513 function inc_for_reload, which go with a new meaning of reload_inc. */
1515 /* If this is an IN/OUT reload in an insn that sets the CC,
1516 it must be for an autoincrement. It doesn't work to store
1517 the incremented value after the insn because that would clobber the CC.
1518 So we must do the increment of the value reloaded from,
1519 increment it, store it back, then decrement again. */
1520 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1522 out = 0;
1523 rld[i].out = 0;
1524 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1525 /* If we did not find a nonzero amount-to-increment-by,
1526 that contradicts the belief that IN is being incremented
1527 in an address in this insn. */
1528 gcc_assert (rld[i].inc != 0);
1530 #endif
1532 /* If we will replace IN and OUT with the reload-reg,
1533 record where they are located so that substitution need
1534 not do a tree walk. */
1536 if (replace_reloads)
1538 if (inloc != 0)
1540 struct replacement *r = &replacements[n_replacements++];
1541 r->what = i;
1542 r->where = inloc;
1543 r->mode = inmode;
1545 if (outloc != 0 && outloc != inloc)
1547 struct replacement *r = &replacements[n_replacements++];
1548 r->what = i;
1549 r->where = outloc;
1550 r->mode = outmode;
1554 /* If this reload is just being introduced and it has both
1555 an incoming quantity and an outgoing quantity that are
1556 supposed to be made to match, see if either one of the two
1557 can serve as the place to reload into.
1559 If one of them is acceptable, set rld[i].reg_rtx
1560 to that one. */
1562 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1564 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1565 inmode, outmode,
1566 rld[i].rclass, i,
1567 earlyclobber_operand_p (out));
1569 /* If the outgoing register already contains the same value
1570 as the incoming one, we can dispense with loading it.
1571 The easiest way to tell the caller that is to give a phony
1572 value for the incoming operand (same as outgoing one). */
1573 if (rld[i].reg_rtx == out
1574 && (REG_P (in) || CONSTANT_P (in))
1575 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1576 static_reload_reg_p, i, inmode))
1577 rld[i].in = out;
1580 /* If this is an input reload and the operand contains a register that
1581 dies in this insn and is used nowhere else, see if it is the right class
1582 to be used for this reload. Use it if so. (This occurs most commonly
1583 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1584 this if it is also an output reload that mentions the register unless
1585 the output is a SUBREG that clobbers an entire register.
1587 Note that the operand might be one of the spill regs, if it is a
1588 pseudo reg and we are in a block where spilling has not taken place.
1589 But if there is no spilling in this block, that is OK.
1590 An explicitly used hard reg cannot be a spill reg. */
1592 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1594 rtx note;
1595 int regno;
1596 machine_mode rel_mode = inmode;
1598 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1599 rel_mode = outmode;
1601 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1602 if (REG_NOTE_KIND (note) == REG_DEAD
1603 && REG_P (XEXP (note, 0))
1604 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1605 && reg_mentioned_p (XEXP (note, 0), in)
1606 /* Check that a former pseudo is valid; see find_dummy_reload. */
1607 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1608 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1609 ORIGINAL_REGNO (XEXP (note, 0)))
1610 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1611 && ! refers_to_regno_for_reload_p (regno,
1612 end_hard_regno (rel_mode,
1613 regno),
1614 PATTERN (this_insn), inloc)
1615 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1616 /* If this is also an output reload, IN cannot be used as
1617 the reload register if it is set in this insn unless IN
1618 is also OUT. */
1619 && (out == 0 || in == out
1620 || ! hard_reg_set_here_p (regno,
1621 end_hard_regno (rel_mode, regno),
1622 PATTERN (this_insn)))
1623 /* ??? Why is this code so different from the previous?
1624 Is there any simple coherent way to describe the two together?
1625 What's going on here?  */
1626 && (in != out
1627 || (GET_CODE (in) == SUBREG
1628 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1629 / UNITS_PER_WORD)
1630 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1631 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1632 /* Make sure the operand fits in the reg that dies. */
1633 && (GET_MODE_SIZE (rel_mode)
1634 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1635 && HARD_REGNO_MODE_OK (regno, inmode)
1636 && HARD_REGNO_MODE_OK (regno, outmode))
1638 unsigned int offs;
1639 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1640 hard_regno_nregs[regno][outmode]);
1642 for (offs = 0; offs < nregs; offs++)
1643 if (fixed_regs[regno + offs]
1644 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1645 regno + offs))
1646 break;
1648 if (offs == nregs
1649 && (! (refers_to_regno_for_reload_p
1650 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1651 || can_reload_into (in, regno, inmode)))
1653 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1654 break;
1659 if (out)
1660 output_reloadnum = i;
1662 return i;
1665 /* Record an additional place we must replace a value
1666 for which we have already recorded a reload.
1667 RELOADNUM is the value returned by push_reload
1668 when the reload was recorded.
1669 This is used in insn patterns that use match_dup. */
1671 static void
1672 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1674 if (replace_reloads)
1676 struct replacement *r = &replacements[n_replacements++];
1677 r->what = reloadnum;
1678 r->where = loc;
1679 r->mode = mode;
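/* Illustrative sketch (added commentary, not part of the original source;
   ADDR_MEM and RELOADNUM are hypothetical names): a caller that has already
   pushed reload number RELOADNUM for the address inside a MEM could record
   an extra location to patch with

	push_replacement (&XEXP (addr_mem, 0), reloadnum, Pmode);

   and the {what, where, mode} entry stored above is then used when the
   chosen reload register is substituted back into the insn.  */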
1683 /* Duplicate any replacement we have recorded to apply at
1684 location ORIG_LOC to also be performed at DUP_LOC.
1685 This is used in insn patterns that use match_dup. */
1687 static void
1688 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1690 int i, n = n_replacements;
1692 for (i = 0; i < n; i++)
1694 struct replacement *r = &replacements[i];
1695 if (r->where == orig_loc)
1696 push_replacement (dup_loc, r->what, r->mode);
1700 /* Transfer all replacements that used to be in reload FROM to be in
1701 reload TO. */
1703 void
1704 transfer_replacements (int to, int from)
1706 int i;
1708 for (i = 0; i < n_replacements; i++)
1709 if (replacements[i].what == from)
1710 replacements[i].what = to;
1713 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1714 or a subpart of it. If we have any replacements registered for IN_RTX,
1715 cancel the reloads that were supposed to load them.
1716 Return nonzero if we canceled any reloads. */
1717 int
1718 remove_address_replacements (rtx in_rtx)
1720 int i, j;
1721 char reload_flags[MAX_RELOADS];
1722 int something_changed = 0;
1724 memset (reload_flags, 0, sizeof reload_flags);
1725 for (i = 0, j = 0; i < n_replacements; i++)
1727 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1728 reload_flags[replacements[i].what] |= 1;
1729 else
1731 replacements[j++] = replacements[i];
1732 reload_flags[replacements[i].what] |= 2;
1735 /* Note that the following store must be done before the recursive calls. */
1736 n_replacements = j;
1738 for (i = n_reloads - 1; i >= 0; i--)
1740 if (reload_flags[i] == 1)
1742 deallocate_reload_reg (i);
1743 remove_address_replacements (rld[i].in);
1744 rld[i].in = 0;
1745 something_changed = 1;
1748 return something_changed;
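/* Clarifying note (added commentary, not part of the original source):
   in the loop above, reload_flags[R] accumulates 1 when some replacement
   for reload R lies inside IN_RTX and 2 when reload R also has a
   replacement elsewhere; only reloads whose flag ends up exactly 1 --
   address reloads that served IN_RTX alone -- are deallocated and have
   their own address replacements removed recursively.  */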
1751 /* If there is only one output reload, and it is not for an earlyclobber
1752 operand, try to combine it with a (logically unrelated) input reload
1753 to reduce the number of reload registers needed.
1755 This is safe if the input reload does not appear in
1756 the value being output-reloaded, because this implies
1757 it is not needed any more once the original insn completes.
1759 If that doesn't work, see if we can use any of the registers that
1760 die in this insn as a reload register. We can if it is of the right
1761 class and does not appear in the value being output-reloaded. */
1763 static void
1764 combine_reloads (void)
1766 int i, regno;
1767 int output_reload = -1;
1768 int secondary_out = -1;
1769 rtx note;
1771 /* Find the output reload; return unless there is exactly one
1772 and that one is mandatory. */
1774 for (i = 0; i < n_reloads; i++)
1775 if (rld[i].out != 0)
1777 if (output_reload >= 0)
1778 return;
1779 output_reload = i;
1782 if (output_reload < 0 || rld[output_reload].optional)
1783 return;
1785 /* An input-output reload isn't combinable. */
1787 if (rld[output_reload].in != 0)
1788 return;
1790 /* If this reload is for an earlyclobber operand, we can't do anything. */
1791 if (earlyclobber_operand_p (rld[output_reload].out))
1792 return;
1794 /* If there is a reload for part of the address of this operand, we would
1795 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1796 its life to the point where doing this combine would not lower the
1797 number of spill registers needed. */
1798 for (i = 0; i < n_reloads; i++)
1799 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1800 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1801 && rld[i].opnum == rld[output_reload].opnum)
1802 return;
1804 /* Check each input reload; can we combine it? */
1806 for (i = 0; i < n_reloads; i++)
1807 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1808 /* Life span of this reload must not extend past main insn. */
1809 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1810 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1811 && rld[i].when_needed != RELOAD_OTHER
1812 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1813 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1814 [(int) rld[output_reload].outmode])
1815 && rld[i].inc == 0
1816 && rld[i].reg_rtx == 0
1817 #ifdef SECONDARY_MEMORY_NEEDED
1818 /* Don't combine two reloads with different secondary
1819 memory locations. */
1820 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1821 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1822 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1823 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1824 #endif
1825 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1826 ? (rld[i].rclass == rld[output_reload].rclass)
1827 : (reg_class_subset_p (rld[i].rclass,
1828 rld[output_reload].rclass)
1829 || reg_class_subset_p (rld[output_reload].rclass,
1830 rld[i].rclass)))
1831 && (MATCHES (rld[i].in, rld[output_reload].out)
1832 /* Args reversed because the first arg seems to be
1833 the one that we imagine being modified
1834 while the second is the one that might be affected. */
1835 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1836 rld[i].in)
1837 /* However, if the input is a register that appears inside
1838 the output, then we also can't share.
1839 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1840 If the same reload reg is used for both reg 69 and the
1841 result to be stored in memory, then that result
1842 will clobber the address of the memory ref. */
1843 && ! (REG_P (rld[i].in)
1844 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1845 rld[output_reload].out))))
1846 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1847 rld[i].when_needed != RELOAD_FOR_INPUT)
1848 && (reg_class_size[(int) rld[i].rclass]
1849 || targetm.small_register_classes_for_mode_p (VOIDmode))
1850 /* We will allow making things slightly worse by combining an
1851 input and an output, but no worse than that. */
1852 && (rld[i].when_needed == RELOAD_FOR_INPUT
1853 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1855 int j;
1857 /* We have found a reload to combine with! */
1858 rld[i].out = rld[output_reload].out;
1859 rld[i].out_reg = rld[output_reload].out_reg;
1860 rld[i].outmode = rld[output_reload].outmode;
1861 /* Mark the old output reload as inoperative. */
1862 rld[output_reload].out = 0;
1863 /* The combined reload is needed for the entire insn. */
1864 rld[i].when_needed = RELOAD_OTHER;
1865 /* If the output reload had a secondary reload, copy it. */
1866 if (rld[output_reload].secondary_out_reload != -1)
1868 rld[i].secondary_out_reload
1869 = rld[output_reload].secondary_out_reload;
1870 rld[i].secondary_out_icode
1871 = rld[output_reload].secondary_out_icode;
1874 #ifdef SECONDARY_MEMORY_NEEDED
1875 /* Copy any secondary MEM. */
1876 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1877 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1878 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1879 #endif
1880 /* If required, minimize the register class. */
1881 if (reg_class_subset_p (rld[output_reload].rclass,
1882 rld[i].rclass))
1883 rld[i].rclass = rld[output_reload].rclass;
1885 /* Transfer all replacements from the old reload to the combined. */
1886 for (j = 0; j < n_replacements; j++)
1887 if (replacements[j].what == output_reload)
1888 replacements[j].what = i;
1890 return;
1893 /* If this insn has only one operand that is modified or written (assumed
1894 to be the first), it must be the one corresponding to this reload. It
1895 is safe to use anything that dies in this insn for that output provided
1896 that it does not occur in the output (we already know it isn't an
1897 earlyclobber).  If this is an asm insn, give up. */
1899 if (INSN_CODE (this_insn) == -1)
1900 return;
1902 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1903 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1904 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1905 return;
1907 /* See if some hard register that dies in this insn and is not used in
1908 the output is the right class. Only works if the register we pick
1909 up can fully hold our output reload. */
1910 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1911 if (REG_NOTE_KIND (note) == REG_DEAD
1912 && REG_P (XEXP (note, 0))
1913 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1914 rld[output_reload].out)
1915 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1916 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1917 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1918 regno)
1919 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1920 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1921 /* Ensure that a secondary or tertiary reload for this output
1922 won't want this register. */
1923 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1924 || (!(TEST_HARD_REG_BIT
1925 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1926 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1927 || !(TEST_HARD_REG_BIT
1928 (reg_class_contents[(int) rld[secondary_out].rclass],
1929 regno)))))
1930 && !fixed_regs[regno]
1931 /* Check that a former pseudo is valid; see find_dummy_reload. */
1932 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1933 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1934 ORIGINAL_REGNO (XEXP (note, 0)))
1935 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1937 rld[output_reload].reg_rtx
1938 = gen_rtx_REG (rld[output_reload].outmode, regno);
1939 return;
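/* Illustrative example (added commentary, not part of the original source),
   assuming the class and register-count checks above are satisfied: in a
   three-address insn such as

	(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))

   operand 0 gets a pure output reload and operand 1 an input reload; since
   (reg 101) does not overlap the output operand (reg 100), the two reloads
   can share a single reload register and the merged reload becomes
   RELOAD_OTHER.  The (set (mem (reg 69)) ...) case quoted in the condition
   above is the situation in which sharing must be refused.  */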
1943 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1944 See if one of IN and OUT is a register that may be used;
1945 this is desirable since a spill-register won't be needed.
1946 If so, return the register rtx that proves acceptable.
1948 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1949 RCLASS is the register class required for the reload.
1951 If FOR_REAL is >= 0, it is the number of the reload,
1952 and in some cases when it can be discovered that OUT doesn't need
1953 to be computed, clear out rld[FOR_REAL].out.
1955 If FOR_REAL is -1, this should not be done, because this call
1956 is just to see if a register can be found, not to find and install it.
1958 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1959 puts an additional constraint on being able to use IN for OUT since
1960 IN must not appear elsewhere in the insn (it is assumed that IN itself
1961 is safe from the earlyclobber). */
1963 static rtx
1964 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1965 machine_mode inmode, machine_mode outmode,
1966 reg_class_t rclass, int for_real, int earlyclobber)
1968 rtx in = real_in;
1969 rtx out = real_out;
1970 int in_offset = 0;
1971 int out_offset = 0;
1972 rtx value = 0;
1974 /* If operands exceed a word, we can't use either of them
1975 unless they have the same size. */
1976 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1977 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1978 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1979 return 0;
1981 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1982 respectively refers to a hard register. */
1984 /* Find the inside of any subregs. */
1985 while (GET_CODE (out) == SUBREG)
1987 if (REG_P (SUBREG_REG (out))
1988 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1989 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1990 GET_MODE (SUBREG_REG (out)),
1991 SUBREG_BYTE (out),
1992 GET_MODE (out));
1993 out = SUBREG_REG (out);
1995 while (GET_CODE (in) == SUBREG)
1997 if (REG_P (SUBREG_REG (in))
1998 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1999 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2000 GET_MODE (SUBREG_REG (in)),
2001 SUBREG_BYTE (in),
2002 GET_MODE (in));
2003 in = SUBREG_REG (in);
2006 /* Narrow down the reg class, the same way push_reload will;
2007 otherwise we might find a dummy now, but push_reload won't. */
2009 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2010 if (preferred_class != NO_REGS)
2011 rclass = (enum reg_class) preferred_class;
2014 /* See if OUT will do. */
2015 if (REG_P (out)
2016 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2018 unsigned int regno = REGNO (out) + out_offset;
2019 unsigned int nwords = hard_regno_nregs[regno][outmode];
2020 rtx saved_rtx;
2022 /* When we consider whether the insn uses OUT,
2023 ignore references within IN. They don't prevent us
2024 from copying IN into OUT, because those refs would
2025 move into the insn that reloads IN.
2027 However, we only ignore IN in its role as this reload.
2028 If the insn uses IN elsewhere and it contains OUT,
2029 that counts. We can't be sure it's the "same" operand
2030 so it might not go through this reload.
2032 We also need to avoid using OUT if it, or part of it, is a
2033 fixed register. Modifying such registers, even transiently,
2034 may have undefined effects on the machine, such as modifying
2035 the stack pointer. */
2036 saved_rtx = *inloc;
2037 *inloc = const0_rtx;
2039 if (regno < FIRST_PSEUDO_REGISTER
2040 && HARD_REGNO_MODE_OK (regno, outmode)
2041 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2042 PATTERN (this_insn), outloc))
2044 unsigned int i;
2046 for (i = 0; i < nwords; i++)
2047 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2048 regno + i)
2049 || fixed_regs[regno + i])
2050 break;
2052 if (i == nwords)
2054 if (REG_P (real_out))
2055 value = real_out;
2056 else
2057 value = gen_rtx_REG (outmode, regno);
2061 *inloc = saved_rtx;
2064 /* Consider using IN if OUT was not acceptable
2065 or if OUT dies in this insn (like the quotient in a divmod insn).
2066 We can't use IN unless it dies in this insn,
2067 which means we must know accurately which hard regs are live.
2068 Also, the result can't go in IN if IN is used within OUT,
2069 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2070 if (hard_regs_live_known
2071 && REG_P (in)
2072 && REGNO (in) < FIRST_PSEUDO_REGISTER
2073 && (value == 0
2074 || find_reg_note (this_insn, REG_UNUSED, real_out))
2075 && find_reg_note (this_insn, REG_DEAD, real_in)
2076 && !fixed_regs[REGNO (in)]
2077 && HARD_REGNO_MODE_OK (REGNO (in),
2078 /* The only case where out and real_out might
2079 have different modes is where real_out
2080 is a subreg, and in that case, out
2081 has a real mode. */
2082 (GET_MODE (out) != VOIDmode
2083 ? GET_MODE (out) : outmode))
2084 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2085 /* However only do this if we can be sure that this input
2086 operand doesn't correspond with an uninitialized pseudo.
2087 global can assign some hardreg to it that is the same as
2088 the one assigned to a different, also live pseudo (as it
2089 can ignore the conflict). We must never introduce writes
2090 to such hardregs, as they would clobber the other live
2091 pseudo. See PR 20973. */
2092 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2093 ORIGINAL_REGNO (in))
2094 /* Similarly, only do this if we can be sure that the death
2095 note is still valid. global can assign some hardreg to
2096 the pseudo referenced in the note and simultaneously a
2097 subword of this hardreg to a different, also live pseudo,
2098 because only another subword of the hardreg is actually
2099 used in the insn. This cannot happen if the pseudo has
2100 been assigned exactly one hardreg. See PR 33732. */
2101 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2103 unsigned int regno = REGNO (in) + in_offset;
2104 unsigned int nwords = hard_regno_nregs[regno][inmode];
2106 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2107 && ! hard_reg_set_here_p (regno, regno + nwords,
2108 PATTERN (this_insn))
2109 && (! earlyclobber
2110 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2111 PATTERN (this_insn), inloc)))
2113 unsigned int i;
2115 for (i = 0; i < nwords; i++)
2116 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2117 regno + i))
2118 break;
2120 if (i == nwords)
2122 /* If we were going to use OUT as the reload reg
2123 and changed our mind, it means OUT is a dummy that
2124 dies here. So don't bother copying value to it. */
2125 if (for_real >= 0 && value == real_out)
2126 rld[for_real].out = 0;
2127 if (REG_P (real_in))
2128 value = real_in;
2129 else
2130 value = gen_rtx_REG (inmode, regno);
2135 return value;
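/* Illustrative sketch (added commentary, not part of the original source;
   the variable names are hypothetical): the constraint-matching loop in
   find_reloads below probes for such a dummy without committing to it by
   passing FOR_REAL == -1, roughly

	value = find_dummy_reload (in_operand, out_operand,
				   in_loc, out_loc,
				   inmode, outmode,
				   rclass, -1, earlyclobber);

   and counts the matched pair as a single reload only when the returned
   value is nonzero.  Passing the real reload number instead allows the OUT
   computation to be dropped when OUT turns out to be a dummy that dies
   here, as done just above.  */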
2138 /* This page contains subroutines used mainly for determining
2139 whether the IN or an OUT of a reload can serve as the
2140 reload register. */
2142 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2144 int
2145 earlyclobber_operand_p (rtx x)
2147 int i;
2149 for (i = 0; i < n_earlyclobbers; i++)
2150 if (reload_earlyclobbers[i] == x)
2151 return 1;
2153 return 0;
2156 /* Return 1 if expression X alters a hard reg in the range
2157 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2158 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2159 X should be the body of an instruction. */
2161 static int
2162 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2164 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2166 rtx op0 = SET_DEST (x);
2168 while (GET_CODE (op0) == SUBREG)
2169 op0 = SUBREG_REG (op0);
2170 if (REG_P (op0))
2172 unsigned int r = REGNO (op0);
2174 /* See if this reg overlaps range under consideration. */
2175 if (r < end_regno
2176 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2177 return 1;
2180 else if (GET_CODE (x) == PARALLEL)
2182 int i = XVECLEN (x, 0) - 1;
2184 for (; i >= 0; i--)
2185 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2186 return 1;
2189 return 0;
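/* Worked example (added commentary, not part of the original source),
   assuming SImode occupies one hard register: for the body
   (set (reg:SI 2) (reg:SI 3)), hard_reg_set_here_p (2, 3, body) returns 1
   because the SET_DEST overlaps the range [2, 3), while
   hard_reg_set_here_p (3, 4, body) returns 0, since register 3 is only
   read here, never stored.  */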
2192 /* Return 1 if ADDR is a valid memory address for mode MODE
2193 in address space AS, and check that each pseudo reg has the
2194 proper kind of hard reg. */
2196 int
2197 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2198 rtx addr, addr_space_t as)
2200 #ifdef GO_IF_LEGITIMATE_ADDRESS
2201 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2202 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2203 return 0;
2205 win:
2206 return 1;
2207 #else
2208 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2209 #endif
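/* Clarifying note (added commentary, not part of the original source):
   the constant 1 passed as the strict argument above means that a pseudo
   which did not get a suitable hard register no longer counts as a valid
   base or index register, so an address like
   (plus (reg 300) (const_int 4)) that was acceptable before reload can be
   rejected here and will then need its address reloaded.  */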
2212 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2213 if they are the same hard reg, and has special hacks for
2214 autoincrement and autodecrement.
2215 This is specifically intended for find_reloads to use
2216 in determining whether two operands match.
2217 X is the operand whose number is the lower of the two.
2219 The value is 2 if Y contains a pre-increment that matches
2220 a non-incrementing address in X. */
2222 /* ??? To be completely correct, we should arrange to pass
2223 for X the output operand and for Y the input operand.
2224 For now, we assume that the output operand has the lower number
2225 because that is natural in (SET output (... input ...)). */
2227 int
2228 operands_match_p (rtx x, rtx y)
2230 int i;
2231 RTX_CODE code = GET_CODE (x);
2232 const char *fmt;
2233 int success_2;
2235 if (x == y)
2236 return 1;
2237 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2238 && (REG_P (y) || (GET_CODE (y) == SUBREG
2239 && REG_P (SUBREG_REG (y)))))
2241 int j;
2243 if (code == SUBREG)
2245 i = REGNO (SUBREG_REG (x));
2246 if (i >= FIRST_PSEUDO_REGISTER)
2247 goto slow;
2248 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2249 GET_MODE (SUBREG_REG (x)),
2250 SUBREG_BYTE (x),
2251 GET_MODE (x));
2253 else
2254 i = REGNO (x);
2256 if (GET_CODE (y) == SUBREG)
2258 j = REGNO (SUBREG_REG (y));
2259 if (j >= FIRST_PSEUDO_REGISTER)
2260 goto slow;
2261 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2262 GET_MODE (SUBREG_REG (y)),
2263 SUBREG_BYTE (y),
2264 GET_MODE (y));
2266 else
2267 j = REGNO (y);
2269 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2270 multiple hard register group of scalar integer registers, so that
2271 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2272 register. */
2273 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2274 && SCALAR_INT_MODE_P (GET_MODE (x))
2275 && i < FIRST_PSEUDO_REGISTER)
2276 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2277 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2278 && SCALAR_INT_MODE_P (GET_MODE (y))
2279 && j < FIRST_PSEUDO_REGISTER)
2280 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2282 return i == j;
2284 /* If two operands must match, because they are really a single
2285 operand of an assembler insn, then two postincrements are invalid
2286 because the assembler insn would increment only once.
2287 On the other hand, a postincrement matches ordinary indexing
2288 if the postincrement is the output operand. */
2289 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2290 return operands_match_p (XEXP (x, 0), y);
2291 /* Two preincrements are invalid
2292 because the assembler insn would increment only once.
2293 On the other hand, a preincrement matches ordinary indexing
2294 if the preincrement is the input operand.
2295 In this case, return 2, since some callers need to do special
2296 things when this happens. */
2297 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2298 || GET_CODE (y) == PRE_MODIFY)
2299 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2301 slow:
2303 /* Now we have disposed of all the cases in which different rtx codes
2304 can match. */
2305 if (code != GET_CODE (y))
2306 return 0;
2308 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2309 if (GET_MODE (x) != GET_MODE (y))
2310 return 0;
2312 /* MEMs referring to different address space are not equivalent. */
2313 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2314 return 0;
2316 switch (code)
2318 CASE_CONST_UNIQUE:
2319 return 0;
2321 case LABEL_REF:
2322 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2323 case SYMBOL_REF:
2324 return XSTR (x, 0) == XSTR (y, 0);
2326 default:
2327 break;
2330 /* Compare the elements. If any pair of corresponding elements
2331 fail to match, return 0 for the whole thing. */
2333 success_2 = 0;
2334 fmt = GET_RTX_FORMAT (code);
2335 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2337 int val, j;
2338 switch (fmt[i])
2340 case 'w':
2341 if (XWINT (x, i) != XWINT (y, i))
2342 return 0;
2343 break;
2345 case 'i':
2346 if (XINT (x, i) != XINT (y, i))
2347 return 0;
2348 break;
2350 case 'e':
2351 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2352 if (val == 0)
2353 return 0;
2354 /* If any subexpression returns 2,
2355 we should return 2 if we are successful. */
2356 if (val == 2)
2357 success_2 = 1;
2358 break;
2360 case '0':
2361 break;
2363 case 'E':
2364 if (XVECLEN (x, i) != XVECLEN (y, i))
2365 return 0;
2366 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2368 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2369 if (val == 0)
2370 return 0;
2371 if (val == 2)
2372 success_2 = 1;
2374 break;
2376 /* It is believed that rtx's at this level will never
2377 contain anything but integers and other rtx's,
2378 except for within LABEL_REFs and SYMBOL_REFs. */
2379 default:
2380 gcc_unreachable ();
2383 return 1 + success_2;
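/* Worked examples (added commentary, not part of the original source),
   for a target where hard register 1 holds word 0 of (reg:DI 1):

	operands_match_p ((reg:SI 1), (subreg:SI (reg:DI 1) 0))  returns 1
	operands_match_p ((post_inc (reg 3)), (reg 3))           returns 1
	operands_match_p ((reg 3), (pre_inc (reg 3)))            returns 2

   where the value 2 tells the caller that Y pre-increments an address
   that X uses without incrementing, as documented above.  */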
2386 /* Describe the range of registers or memory referenced by X.
2387 If X is a register, set REG_FLAG and put the first register
2388 number into START and the last plus one into END.
2389 If X is a memory reference, put a base address into BASE
2390 and a range of integer offsets into START and END.
2391 If X is pushing on the stack, we can assume it causes no trouble,
2392 so we set the SAFE field. */
2394 static struct decomposition
2395 decompose (rtx x)
2397 struct decomposition val;
2398 int all_const = 0;
2400 memset (&val, 0, sizeof (val));
2402 switch (GET_CODE (x))
2404 case MEM:
2406 rtx base = NULL_RTX, offset = 0;
2407 rtx addr = XEXP (x, 0);
2409 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2410 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2412 val.base = XEXP (addr, 0);
2413 val.start = -GET_MODE_SIZE (GET_MODE (x));
2414 val.end = GET_MODE_SIZE (GET_MODE (x));
2415 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2416 return val;
2419 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2421 if (GET_CODE (XEXP (addr, 1)) == PLUS
2422 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2423 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2425 val.base = XEXP (addr, 0);
2426 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2427 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2428 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2429 return val;
2433 if (GET_CODE (addr) == CONST)
2435 addr = XEXP (addr, 0);
2436 all_const = 1;
2438 if (GET_CODE (addr) == PLUS)
2440 if (CONSTANT_P (XEXP (addr, 0)))
2442 base = XEXP (addr, 1);
2443 offset = XEXP (addr, 0);
2445 else if (CONSTANT_P (XEXP (addr, 1)))
2447 base = XEXP (addr, 0);
2448 offset = XEXP (addr, 1);
2452 if (offset == 0)
2454 base = addr;
2455 offset = const0_rtx;
2457 if (GET_CODE (offset) == CONST)
2458 offset = XEXP (offset, 0);
2459 if (GET_CODE (offset) == PLUS)
2461 if (CONST_INT_P (XEXP (offset, 0)))
2463 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2464 offset = XEXP (offset, 0);
2466 else if (CONST_INT_P (XEXP (offset, 1)))
2468 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2469 offset = XEXP (offset, 1);
2471 else
2473 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2474 offset = const0_rtx;
2477 else if (!CONST_INT_P (offset))
2479 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2480 offset = const0_rtx;
2483 if (all_const && GET_CODE (base) == PLUS)
2484 base = gen_rtx_CONST (GET_MODE (base), base);
2486 gcc_assert (CONST_INT_P (offset));
2488 val.start = INTVAL (offset);
2489 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2490 val.base = base;
2492 break;
2494 case REG:
2495 val.reg_flag = 1;
2496 val.start = true_regnum (x);
2497 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2499 /* A pseudo with no hard reg. */
2500 val.start = REGNO (x);
2501 val.end = val.start + 1;
2503 else
2504 /* A hard reg. */
2505 val.end = end_hard_regno (GET_MODE (x), val.start);
2506 break;
2508 case SUBREG:
2509 if (!REG_P (SUBREG_REG (x)))
2510 /* This could be more precise, but it's good enough. */
2511 return decompose (SUBREG_REG (x));
2512 val.reg_flag = 1;
2513 val.start = true_regnum (x);
2514 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2515 return decompose (SUBREG_REG (x));
2516 else
2517 /* A hard reg. */
2518 val.end = val.start + subreg_nregs (x);
2519 break;
2521 case SCRATCH:
2522 /* This hasn't been assigned yet, so it can't conflict yet. */
2523 val.safe = 1;
2524 break;
2526 default:
2527 gcc_assert (CONSTANT_P (x));
2528 val.safe = 1;
2529 break;
2531 return val;
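/* Worked example (added commentary, not part of the original source),
   assuming 4-byte SImode: decompose ((mem:SI (plus (reg fp) (const_int 8))))
   yields base == the frame-pointer rtx, start == 8 and end == 12, while
   decompose ((mem:SI (pre_dec (reg sp)))) records the stack pointer as the
   base and sets SAFE, since a push is assumed not to conflict with anything
   else.  */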
2534 /* Return 1 if altering Y will not modify the value of X.
2535 Y is also described by YDATA, which should be decompose (Y). */
2537 static int
2538 immune_p (rtx x, rtx y, struct decomposition ydata)
2540 struct decomposition xdata;
2542 if (ydata.reg_flag)
2543 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2544 if (ydata.safe)
2545 return 1;
2547 gcc_assert (MEM_P (y));
2548 /* If Y is memory and X is not, Y can't affect X. */
2549 if (!MEM_P (x))
2550 return 1;
2552 xdata = decompose (x);
2554 if (! rtx_equal_p (xdata.base, ydata.base))
2556 /* If bases are distinct symbolic constants, there is no overlap. */
2557 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2558 return 1;
2559 /* Constants and stack slots never overlap. */
2560 if (CONSTANT_P (xdata.base)
2561 && (ydata.base == frame_pointer_rtx
2562 || ydata.base == hard_frame_pointer_rtx
2563 || ydata.base == stack_pointer_rtx))
2564 return 1;
2565 if (CONSTANT_P (ydata.base)
2566 && (xdata.base == frame_pointer_rtx
2567 || xdata.base == hard_frame_pointer_rtx
2568 || xdata.base == stack_pointer_rtx))
2569 return 1;
2570 /* If either base is variable, we don't know anything. */
2571 return 0;
2574 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
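/* Worked example (added commentary, not part of the original source),
   assuming 4-byte SImode: two frame slots
   (mem:SI (plus (reg fp) (const_int 8))) and
   (mem:SI (plus (reg fp) (const_int 12))) share a base but decompose to the
   disjoint ranges [8, 12) and [12, 16), so each is immune to a store into
   the other; likewise a constant-pool reference is immune to any
   stack-pointer-based store, because constants and stack slots never
   overlap.  */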
2577 /* Similar, but calls decompose. */
2579 int
2580 safe_from_earlyclobber (rtx op, rtx clobber)
2582 struct decomposition early_data;
2584 early_data = decompose (clobber);
2585 return immune_p (op, clobber, early_data);
2588 /* Main entry point of this file: search the body of INSN
2589 for values that need reloading and record them with push_reload.
2590 REPLACE nonzero means record also where the values occur
2591 so that subst_reloads can be used.
2593 IND_LEVELS says how many levels of indirection are supported by this
2594 machine; a value of zero means that a memory reference is not a valid
2595 memory address.
2597 LIVE_KNOWN says we have valid information about which hard
2598 regs are live at each point in the program; this is true when
2599 we are called from global_alloc but false when stupid register
2600 allocation has been done.
2602 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2603 which is nonnegative if the reg has been commandeered for reloading into.
2604 It is copied into STATIC_RELOAD_REG_P and referenced from there
2605 by various subroutines.
2607 Return TRUE if some operands need to be changed, because of swapping
2608 commutative operands, reg_equiv_address substitution, or whatever. */
2610 int
2611 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2612 short *reload_reg_p)
2614 int insn_code_number;
2615 int i, j;
2616 int noperands;
2617 /* These start out as the constraints for the insn
2618 and they are chewed up as we consider alternatives. */
2619 const char *constraints[MAX_RECOG_OPERANDS];
2620 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2621 a register. */
2622 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2623 char pref_or_nothing[MAX_RECOG_OPERANDS];
2624 /* Nonzero for a MEM operand whose entire address needs a reload.
2625 May be -1 to indicate the entire address may or may not need a reload. */
2626 int address_reloaded[MAX_RECOG_OPERANDS];
2627 /* Nonzero for an address operand that needs to be completely reloaded.
2628 May be -1 to indicate the entire operand may or may not need a reload. */
2629 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2630 /* Value of enum reload_type to use for operand. */
2631 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2632 /* Value of enum reload_type to use within address of operand. */
2633 enum reload_type address_type[MAX_RECOG_OPERANDS];
2634 /* Save the usage of each operand. */
2635 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2636 int no_input_reloads = 0, no_output_reloads = 0;
2637 int n_alternatives;
2638 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2639 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2640 char this_alternative_win[MAX_RECOG_OPERANDS];
2641 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2642 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2643 int this_alternative_matches[MAX_RECOG_OPERANDS];
2644 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2645 int this_alternative_number;
2646 int goal_alternative_number = 0;
2647 int operand_reloadnum[MAX_RECOG_OPERANDS];
2648 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2649 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2650 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2651 char goal_alternative_win[MAX_RECOG_OPERANDS];
2652 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2653 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2654 int goal_alternative_swapped;
2655 int best;
2656 int commutative;
2657 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2658 rtx substed_operand[MAX_RECOG_OPERANDS];
2659 rtx body = PATTERN (insn);
2660 rtx set = single_set (insn);
2661 int goal_earlyclobber = 0, this_earlyclobber;
2662 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2663 int retval = 0;
2665 this_insn = insn;
2666 n_reloads = 0;
2667 n_replacements = 0;
2668 n_earlyclobbers = 0;
2669 replace_reloads = replace;
2670 hard_regs_live_known = live_known;
2671 static_reload_reg_p = reload_reg_p;
2673 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2674 neither are insns that SET cc0. Insns that use CC0 are not allowed
2675 to have any input reloads. */
2676 if (JUMP_P (insn) || CALL_P (insn))
2677 no_output_reloads = 1;
2679 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2680 no_input_reloads = 1;
2681 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2682 no_output_reloads = 1;
2684 #ifdef SECONDARY_MEMORY_NEEDED
2685 /* The eliminated forms of any secondary memory locations are per-insn, so
2686 clear them out here. */
2688 if (secondary_memlocs_elim_used)
2690 memset (secondary_memlocs_elim, 0,
2691 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2692 secondary_memlocs_elim_used = 0;
2694 #endif
2696 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2697 is cheap to move between them. If it is not, there may not be an insn
2698 to do the copy, so we may need a reload. */
2699 if (GET_CODE (body) == SET
2700 && REG_P (SET_DEST (body))
2701 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2702 && REG_P (SET_SRC (body))
2703 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2704 && register_move_cost (GET_MODE (SET_SRC (body)),
2705 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2706 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2707 return 0;
2709 extract_insn (insn);
2711 noperands = reload_n_operands = recog_data.n_operands;
2712 n_alternatives = recog_data.n_alternatives;
2714 /* Just return "no reloads" if insn has no operands with constraints. */
2715 if (noperands == 0 || n_alternatives == 0)
2716 return 0;
2718 insn_code_number = INSN_CODE (insn);
2719 this_insn_is_asm = insn_code_number < 0;
2721 memcpy (operand_mode, recog_data.operand_mode,
2722 noperands * sizeof (machine_mode));
2723 memcpy (constraints, recog_data.constraints,
2724 noperands * sizeof (const char *));
2726 commutative = -1;
2728 /* If we will need to know, later, whether some pair of operands
2729 are the same, we must compare them now and save the result.
2730 Reloading the base and index registers will clobber them
2731 and afterward they will fail to match. */
2733 for (i = 0; i < noperands; i++)
2735 const char *p;
2736 int c;
2737 char *end;
2739 substed_operand[i] = recog_data.operand[i];
2740 p = constraints[i];
2742 modified[i] = RELOAD_READ;
2744 /* Scan this operand's constraint to see if it is an output operand,
2745 an in-out operand, is commutative, or should match another. */
2747 while ((c = *p))
2749 p += CONSTRAINT_LEN (c, p);
2750 switch (c)
2752 case '=':
2753 modified[i] = RELOAD_WRITE;
2754 break;
2755 case '+':
2756 modified[i] = RELOAD_READ_WRITE;
2757 break;
2758 case '%':
2760 /* The last operand should not be marked commutative. */
2761 gcc_assert (i != noperands - 1);
2763 /* We currently only support one commutative pair of
2764 operands. Some existing asm code currently uses more
2765 than one pair. Previously, that would usually work,
2766 but sometimes it would crash the compiler. We
2767 continue supporting that case as well as we can by
2768 silently ignoring all but the first pair. In the
2769 future we may handle it correctly. */
2770 if (commutative < 0)
2771 commutative = i;
2772 else
2773 gcc_assert (this_insn_is_asm);
2775 break;
2776 /* Use of ISDIGIT is tempting here, but it may get expensive because
2777 of locale support we don't want. */
2778 case '0': case '1': case '2': case '3': case '4':
2779 case '5': case '6': case '7': case '8': case '9':
2781 c = strtoul (p - 1, &end, 10);
2782 p = end;
2784 operands_match[c][i]
2785 = operands_match_p (recog_data.operand[c],
2786 recog_data.operand[i]);
2788 /* An operand may not match itself. */
2789 gcc_assert (c != i);
2791 /* If C can be commuted with C+1, and C might need to match I,
2792 then C+1 might also need to match I. */
2793 if (commutative >= 0)
2795 if (c == commutative || c == commutative + 1)
2797 int other = c + (c == commutative ? 1 : -1);
2798 operands_match[other][i]
2799 = operands_match_p (recog_data.operand[other],
2800 recog_data.operand[i]);
2802 if (i == commutative || i == commutative + 1)
2804 int other = i + (i == commutative ? 1 : -1);
2805 operands_match[c][other]
2806 = operands_match_p (recog_data.operand[c],
2807 recog_data.operand[other]);
2809 /* Note that C is supposed to be less than I.
2810 No need to consider altering both C and I because in
2811 that case we would alter one into the other. */
2818 /* Examine each operand that is a memory reference or memory address
2819 and reload parts of the addresses into index registers.
2820 Also here any references to pseudo regs that didn't get hard regs
2821 but are equivalent to constants get replaced in the insn itself
2822 with those constants. Nobody will ever see them again.
2824 Finally, set up the preferred classes of each operand. */
2826 for (i = 0; i < noperands; i++)
2828 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2830 address_reloaded[i] = 0;
2831 address_operand_reloaded[i] = 0;
2832 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2833 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2834 : RELOAD_OTHER);
2835 address_type[i]
2836 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2837 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2838 : RELOAD_OTHER);
2840 if (*constraints[i] == 0)
2841 /* Ignore things like match_operator operands. */
2843 else if (insn_extra_address_constraint
2844 (lookup_constraint (constraints[i])))
2846 address_operand_reloaded[i]
2847 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2848 recog_data.operand[i],
2849 recog_data.operand_loc[i],
2850 i, operand_type[i], ind_levels, insn);
2852 /* If we now have a simple operand where we used to have a
2853 PLUS or MULT, re-recognize and try again. */
2854 if ((OBJECT_P (*recog_data.operand_loc[i])
2855 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2856 && (GET_CODE (recog_data.operand[i]) == MULT
2857 || GET_CODE (recog_data.operand[i]) == PLUS))
2859 INSN_CODE (insn) = -1;
2860 retval = find_reloads (insn, replace, ind_levels, live_known,
2861 reload_reg_p);
2862 return retval;
2865 recog_data.operand[i] = *recog_data.operand_loc[i];
2866 substed_operand[i] = recog_data.operand[i];
2868 /* Address operands are reloaded in their existing mode,
2869 no matter what is specified in the machine description. */
2870 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2872 /* If the address is a single CONST_INT, pick the address mode
2873 instead; otherwise we will later not know in which mode
2874 the reload should be performed. */
2875 if (operand_mode[i] == VOIDmode)
2876 operand_mode[i] = Pmode;
2879 else if (code == MEM)
2881 address_reloaded[i]
2882 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2883 recog_data.operand_loc[i],
2884 XEXP (recog_data.operand[i], 0),
2885 &XEXP (recog_data.operand[i], 0),
2886 i, address_type[i], ind_levels, insn);
2887 recog_data.operand[i] = *recog_data.operand_loc[i];
2888 substed_operand[i] = recog_data.operand[i];
2890 else if (code == SUBREG)
2892 rtx reg = SUBREG_REG (recog_data.operand[i]);
2893 rtx op
2894 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2895 ind_levels,
2896 set != 0
2897 && &SET_DEST (set) == recog_data.operand_loc[i],
2898 insn,
2899 &address_reloaded[i]);
2901 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2902 that didn't get a hard register, emit a USE with a REG_EQUAL
2903 note in front so that we might inherit a previous, possibly
2904 wider reload. */
2906 if (replace
2907 && MEM_P (op)
2908 && REG_P (reg)
2909 && (GET_MODE_SIZE (GET_MODE (reg))
2910 >= GET_MODE_SIZE (GET_MODE (op)))
2911 && reg_equiv_constant (REGNO (reg)) == 0)
2912 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2913 insn),
2914 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2916 substed_operand[i] = recog_data.operand[i] = op;
2918 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2919 /* We can get a PLUS as an "operand" as a result of register
2920 elimination. See eliminate_regs and gen_reload. We handle
2921 a unary operator by reloading the operand. */
2922 substed_operand[i] = recog_data.operand[i]
2923 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2924 ind_levels, 0, insn,
2925 &address_reloaded[i]);
2926 else if (code == REG)
2928 /* This is equivalent to calling find_reloads_toplev.
2929 The code is duplicated for speed.
2930 When we find a pseudo always equivalent to a constant,
2931 we replace it by the constant. We must be sure, however,
2932 that we don't try to replace it in the insn in which it
2933 is being set. */
2934 int regno = REGNO (recog_data.operand[i]);
2935 if (reg_equiv_constant (regno) != 0
2936 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2938 /* Record the existing mode so that the check if constants are
2939 allowed will work when operand_mode isn't specified. */
2941 if (operand_mode[i] == VOIDmode)
2942 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2944 substed_operand[i] = recog_data.operand[i]
2945 = reg_equiv_constant (regno);
2947 if (reg_equiv_memory_loc (regno) != 0
2948 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2949 /* We need not give a valid is_set_dest argument since the case
2950 of a constant equivalence was checked above. */
2951 substed_operand[i] = recog_data.operand[i]
2952 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2953 ind_levels, 0, insn,
2954 &address_reloaded[i]);
2956 /* If the operand is still a register (we didn't replace it with an
2957 equivalent), get the preferred class to reload it into. */
2958 code = GET_CODE (recog_data.operand[i]);
2959 preferred_class[i]
2960 = ((code == REG && REGNO (recog_data.operand[i])
2961 >= FIRST_PSEUDO_REGISTER)
2962 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2963 : NO_REGS);
2964 pref_or_nothing[i]
2965 = (code == REG
2966 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2967 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2970 /* If this is simply a copy from operand 1 to operand 0, merge the
2971 preferred classes for the operands. */
2972 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2973 && recog_data.operand[1] == SET_SRC (set))
2975 preferred_class[0] = preferred_class[1]
2976 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2977 pref_or_nothing[0] |= pref_or_nothing[1];
2978 pref_or_nothing[1] |= pref_or_nothing[0];
2981 /* Now see what we need for pseudo-regs that didn't get hard regs
2982 or got the wrong kind of hard reg. For this, we must consider
2983 all the operands together against the register constraints. */
2985 best = MAX_RECOG_OPERANDS * 2 + 600;
2987 goal_alternative_swapped = 0;
2989 /* The constraints are made of several alternatives.
2990 Each operand's constraint looks like foo,bar,... with commas
2991 separating the alternatives. The first alternatives for all
2992 operands go together, the second alternatives go together, etc.
2994 First loop over alternatives. */
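/* Illustrative example (added commentary, not part of the original source):
   with constraints "=r,m" for operand 0 and "ri,r" for operand 1,
   alternative 0 pairs "=r" with "ri" (register destination, register or
   immediate source) and alternative 1 pairs "m" with "r" (memory
   destination, register source); the code below costs each such column
   separately and remembers the cheapest one as the goal alternative.  */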
2996 alternative_mask enabled = get_enabled_alternatives (insn);
2997 for (this_alternative_number = 0;
2998 this_alternative_number < n_alternatives;
2999 this_alternative_number++)
3001 int swapped;
3003 if (!TEST_BIT (enabled, this_alternative_number))
3005 int i;
3007 for (i = 0; i < recog_data.n_operands; i++)
3008 constraints[i] = skip_alternative (constraints[i]);
3010 continue;
3013 /* If insn is commutative (it's safe to exchange a certain pair
3014 of operands) then we need to try each alternative twice, the
3015 second time matching those two operands as if we had
3016 exchanged them. To do this, really exchange them in
3017 operands. */
3018 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3020 /* Loop over operands for one constraint alternative. */
3021 /* LOSERS counts those that don't fit this alternative
3022 and would require loading. */
3023 int losers = 0;
3024 BAD is set to 1 if some operand can't fit this alternative
3025 even after reloading. */
3026 int bad = 0;
3027 /* REJECT is a count of how undesirable this alternative says it is
3028 if any reloading is required. If the alternative matches exactly
3029 then REJECT is ignored, but otherwise it gets this much
3030 counted against it in addition to the reloading needed. Each
3031 ? counts three times here since we want the disparaging caused by
3032 a bad register class to only count 1/3 as much. */
3033 int reject = 0;
3035 if (swapped)
3037 recog_data.operand[commutative] = substed_operand[commutative + 1];
3038 recog_data.operand[commutative + 1] = substed_operand[commutative];
3039 /* Swap the duplicates too. */
3040 for (i = 0; i < recog_data.n_dups; i++)
3041 if (recog_data.dup_num[i] == commutative
3042 || recog_data.dup_num[i] == commutative + 1)
3043 *recog_data.dup_loc[i]
3044 = recog_data.operand[(int) recog_data.dup_num[i]];
3046 std::swap (preferred_class[commutative],
3047 preferred_class[commutative + 1]);
3048 std::swap (pref_or_nothing[commutative],
3049 pref_or_nothing[commutative + 1]);
3050 std::swap (address_reloaded[commutative],
3051 address_reloaded[commutative + 1]);
3054 this_earlyclobber = 0;
3056 for (i = 0; i < noperands; i++)
3058 const char *p = constraints[i];
3059 char *end;
3060 int len;
3061 int win = 0;
3062 int did_match = 0;
3063 /* 0 => this operand can be reloaded somehow for this alternative. */
3064 int badop = 1;
3065 /* 0 => this operand can be reloaded if the alternative allows regs. */
3066 int winreg = 0;
3067 int c;
3068 int m;
3069 rtx operand = recog_data.operand[i];
3070 int offset = 0;
3071 /* Nonzero means this is a MEM that must be reloaded into a reg
3072 regardless of what the constraint says. */
3073 int force_reload = 0;
3074 int offmemok = 0;
3075 /* Nonzero if a constant forced into memory would be OK for this
3076 operand. */
3077 int constmemok = 0;
3078 int earlyclobber = 0;
3079 enum constraint_num cn;
3080 enum reg_class cl;
3082 /* If the predicate accepts a unary operator, it means that
3083 we need to reload the operand, but do not do this for
3084 match_operator and friends. */
3085 if (UNARY_P (operand) && *p != 0)
3086 operand = XEXP (operand, 0);
3088 /* If the operand is a SUBREG, extract
3089 the REG or MEM (or maybe even a constant) within.
3090 (Constants can occur as a result of reg_equiv_constant.) */
3092 while (GET_CODE (operand) == SUBREG)
3094 /* Offset only matters when operand is a REG and
3095 it is a hard reg. This is because it is passed
3096 to reg_fits_class_p if it is a REG and all pseudos
3097 return 0 from that function. */
3098 if (REG_P (SUBREG_REG (operand))
3099 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3101 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3102 GET_MODE (SUBREG_REG (operand)),
3103 SUBREG_BYTE (operand),
3104 GET_MODE (operand)) < 0)
3105 force_reload = 1;
3106 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3107 GET_MODE (SUBREG_REG (operand)),
3108 SUBREG_BYTE (operand),
3109 GET_MODE (operand));
3111 operand = SUBREG_REG (operand);
3112 /* Force reload if this is a constant or PLUS or if there may
3113 be a problem accessing OPERAND in the outer mode. */
3114 if (CONSTANT_P (operand)
3115 || GET_CODE (operand) == PLUS
3116 /* We must force a reload of paradoxical SUBREGs
3117 of a MEM because the alignment of the inner value
3118 may not be enough to do the outer reference. On
3119 big-endian machines, it may also reference outside
3120 the object.
3122 On machines that extend byte operations and we have a
3123 SUBREG where both the inner and outer modes are no wider
3124 than a word and the inner mode is narrower, is integral,
3125 and gets extended when loaded from memory, combine.c has
3126 made assumptions about the behavior of the machine in such
3127 register access. If the data is, in fact, in memory we
3128 must always load using the size assumed to be in the
3129 register and let the insn do the different-sized
3130 accesses.
3132 This is doubly true if WORD_REGISTER_OPERATIONS. In
3133 this case eliminate_regs has left non-paradoxical
3134 subregs for push_reload to see. Make sure it does
3135 by forcing the reload.
3137 ??? When is it right at this stage to have a subreg
3138 of a mem that is _not_ to be handled specially? IMO
3139 those should have been reduced to just a mem. */
3140 || ((MEM_P (operand)
3141 || (REG_P (operand)
3142 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3143 #if !WORD_REGISTER_OPERATIONS
3144 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3145 < BIGGEST_ALIGNMENT)
3146 && (GET_MODE_SIZE (operand_mode[i])
3147 > GET_MODE_SIZE (GET_MODE (operand))))
3148 || BYTES_BIG_ENDIAN
3149 #ifdef LOAD_EXTEND_OP
3150 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3151 && (GET_MODE_SIZE (GET_MODE (operand))
3152 <= UNITS_PER_WORD)
3153 && (GET_MODE_SIZE (operand_mode[i])
3154 > GET_MODE_SIZE (GET_MODE (operand)))
3155 && INTEGRAL_MODE_P (GET_MODE (operand))
3156 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3157 #endif
3159 #endif
3162 force_reload = 1;
3165 this_alternative[i] = NO_REGS;
3166 this_alternative_win[i] = 0;
3167 this_alternative_match_win[i] = 0;
3168 this_alternative_offmemok[i] = 0;
3169 this_alternative_earlyclobber[i] = 0;
3170 this_alternative_matches[i] = -1;
3172 /* An empty constraint or empty alternative
3173 allows anything which matched the pattern. */
3174 if (*p == 0 || *p == ',')
3175 win = 1, badop = 0;
3177 /* Scan this alternative's specs for this operand;
3178 set WIN if the operand fits any letter in this alternative.
3179 Otherwise, clear BADOP if this operand could
3180 fit some letter after reloads,
3181 or set WINREG if this operand could fit after reloads
3182 provided the constraint allows some registers. */
3185 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3187 case '\0':
3188 len = 0;
3189 break;
3190 case ',':
3191 c = '\0';
3192 break;
3194 case '?':
3195 reject += 6;
3196 break;
3198 case '!':
3199 reject = 600;
3200 break;
3202 case '#':
3203 /* Ignore rest of this alternative as far as
3204 reloading is concerned. */
3206 p++;
3207 while (*p && *p != ',');
3208 len = 0;
3209 break;
3211 case '0': case '1': case '2': case '3': case '4':
3212 case '5': case '6': case '7': case '8': case '9':
3213 m = strtoul (p, &end, 10);
3214 p = end;
3215 len = 0;
3217 this_alternative_matches[i] = m;
3218 /* We are supposed to match a previous operand.
3219 If we do, we win if that one did.
3220 If we do not, count both of the operands as losers.
3221 (This is too conservative, since most of the time
3222 only a single reload insn will be needed to make
3223 the two operands win. As a result, this alternative
3224 may be rejected when it is actually desirable.) */
3225 if ((swapped && (m != commutative || i != commutative + 1))
3226 /* If we are matching as if two operands were swapped,
3227 also pretend that operands_match had been computed
3228 with the operands swapped.
3229 But if I is the second of those and M is the first,
3230 don't exchange them, because operands_match is valid
3231 only on one side of its diagonal. */
3232 ? (operands_match
3233 [(m == commutative || m == commutative + 1)
3234 ? 2 * commutative + 1 - m : m]
3235 [(i == commutative || i == commutative + 1)
3236 ? 2 * commutative + 1 - i : i])
3237 : operands_match[m][i])
3239 /* If we are matching a non-offsettable address where an
3240 offsettable address was expected, then we must reject
3241 this combination, because we can't reload it. */
3242 if (this_alternative_offmemok[m]
3243 && MEM_P (recog_data.operand[m])
3244 && this_alternative[m] == NO_REGS
3245 && ! this_alternative_win[m])
3246 bad = 1;
3248 did_match = this_alternative_win[m];
3250 else
3252 /* Operands don't match. */
3253 rtx value;
3254 int loc1, loc2;
3255 /* Retroactively mark the operand we had to match
3256 as a loser, if it wasn't already. */
3257 if (this_alternative_win[m])
3258 losers++;
3259 this_alternative_win[m] = 0;
3260 if (this_alternative[m] == NO_REGS)
3261 bad = 1;
3262 /* But count the pair only once in the total badness of
3263 this alternative, if the pair can be a dummy reload.
3264 The pointers in operand_loc are not swapped; swap
3265 them by hand if necessary. */
3266 if (swapped && i == commutative)
3267 loc1 = commutative + 1;
3268 else if (swapped && i == commutative + 1)
3269 loc1 = commutative;
3270 else
3271 loc1 = i;
3272 if (swapped && m == commutative)
3273 loc2 = commutative + 1;
3274 else if (swapped && m == commutative + 1)
3275 loc2 = commutative;
3276 else
3277 loc2 = m;
3278 value
3279 = find_dummy_reload (recog_data.operand[i],
3280 recog_data.operand[m],
3281 recog_data.operand_loc[loc1],
3282 recog_data.operand_loc[loc2],
3283 operand_mode[i], operand_mode[m],
3284 this_alternative[m], -1,
3285 this_alternative_earlyclobber[m]);
3287 if (value != 0)
3288 losers--;
3290 /* This can be fixed with reloads if the operand
3291 we are supposed to match can be fixed with reloads. */
3292 badop = 0;
3293 this_alternative[i] = this_alternative[m];
3295 /* If we have to reload this operand and some previous
3296 operand also had to match the same thing as this
3297 operand, we don't know how to do that. So reject this
3298 alternative. */
3299 if (! did_match || force_reload)
3300 for (j = 0; j < i; j++)
3301 if (this_alternative_matches[j]
3302 == this_alternative_matches[i])
3304 badop = 1;
3305 break;
3307 break;
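	    /* Concretely: with commutative == 1 and SWAPPED nonzero, a
	       match index m of 1 or 2 is remapped to 2 * commutative + 1 - m
	       above, i.e. 1 becomes 2 and 2 becomes 1, so operands_match is
	       read as if the commutative pair had been exchanged.  */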
3309 case 'p':
3310 /* All necessary reloads for an address_operand
3311 were handled in find_reloads_address. */
3312 this_alternative[i]
3313 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3314 ADDRESS, SCRATCH);
3315 win = 1;
3316 badop = 0;
3317 break;
3319 case TARGET_MEM_CONSTRAINT:
3320 if (force_reload)
3321 break;
3322 if (MEM_P (operand)
3323 || (REG_P (operand)
3324 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3325 && reg_renumber[REGNO (operand)] < 0))
3326 win = 1;
3327 if (CONST_POOL_OK_P (operand_mode[i], operand))
3328 badop = 0;
3329 constmemok = 1;
3330 break;
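	    /* A pseudo that did not get a hard register counts as memory
	       here because reload will replace it with its stack slot (or
	       other memory equivalent), so it can satisfy the memory
	       constraint.  */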
3332 case '<':
3333 if (MEM_P (operand)
3334 && ! address_reloaded[i]
3335 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3336 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3337 win = 1;
3338 break;
3340 case '>':
3341 if (MEM_P (operand)
3342 && ! address_reloaded[i]
3343 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3344 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3345 win = 1;
3346 break;
3348 /* Memory operand whose address is not offsettable. */
3349 case 'V':
3350 if (force_reload)
3351 break;
3352 if (MEM_P (operand)
3353 && ! (ind_levels ? offsettable_memref_p (operand)
3354 : offsettable_nonstrict_memref_p (operand))
3355 /* Certain mem addresses will become offsettable
3356 after they themselves are reloaded. This is important;
3357 we don't want our own handling of unoffsettables
3358 to override the handling of reg_equiv_address. */
3359 && !(REG_P (XEXP (operand, 0))
3360 && (ind_levels == 0
3361 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3362 win = 1;
3363 break;
3365 /* Memory operand whose address is offsettable. */
3366 case 'o':
3367 if (force_reload)
3368 break;
3369 if ((MEM_P (operand)
3370 /* If IND_LEVELS, find_reloads_address won't reload a
3371 pseudo that didn't get a hard reg, so we have to
3372 reject that case. */
3373 && ((ind_levels ? offsettable_memref_p (operand)
3374 : offsettable_nonstrict_memref_p (operand))
3375 /* A reloaded address is offsettable because it is now
3376 just a simple register indirect. */
3377 || address_reloaded[i] == 1))
3378 || (REG_P (operand)
3379 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3380 && reg_renumber[REGNO (operand)] < 0
3381 /* If reg_equiv_address is nonzero, we will be
3382 loading it into a register; hence it will be
3383 offsettable, but we cannot say that reg_equiv_mem
3384 is offsettable without checking. */
3385 && ((reg_equiv_mem (REGNO (operand)) != 0
3386 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3387 || (reg_equiv_address (REGNO (operand)) != 0))))
3388 win = 1;
3389 if (CONST_POOL_OK_P (operand_mode[i], operand)
3390 || MEM_P (operand))
3391 badop = 0;
3392 constmemok = 1;
3393 offmemok = 1;
3394 break;
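	    /* For example, (mem (plus (reg) (const_int 4))) is offsettable,
	       while an autoincrement address such as (mem (post_inc (reg)))
	       is not; the latter only satisfies 'o' once its address has
	       been reloaded into a simple register.  */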
3396 case '&':
3397 /* Output operand that is stored before the need for the
3398 input operands (and their index registers) is over. */
3399 earlyclobber = 1, this_earlyclobber = 1;
3400 break;
3402 case 'X':
3403 force_reload = 0;
3404 win = 1;
3405 break;
3407 case 'g':
3408 if (! force_reload
3409 /* A PLUS is never a valid operand, but reload can make
3410 it from a register when eliminating registers. */
3411 && GET_CODE (operand) != PLUS
3412 /* A SCRATCH is not a valid operand. */
3413 && GET_CODE (operand) != SCRATCH
3414 && (! CONSTANT_P (operand)
3415 || ! flag_pic
3416 || LEGITIMATE_PIC_OPERAND_P (operand))
3417 && (GENERAL_REGS == ALL_REGS
3418 || !REG_P (operand)
3419 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3420 && reg_renumber[REGNO (operand)] < 0)))
3421 win = 1;
3422 cl = GENERAL_REGS;
3423 goto reg;
3425 default:
3426 cn = lookup_constraint (p);
3427 switch (get_constraint_type (cn))
3429 case CT_REGISTER:
3430 cl = reg_class_for_constraint (cn);
3431 if (cl != NO_REGS)
3432 goto reg;
3433 break;
3435 case CT_CONST_INT:
3436 if (CONST_INT_P (operand)
3437 && (insn_const_int_ok_for_constraint
3438 (INTVAL (operand), cn)))
3439 win = true;
3440 break;
3442 case CT_MEMORY:
3443 if (force_reload)
3444 break;
3445 if (constraint_satisfied_p (operand, cn))
3446 win = 1;
3447 /* If the address was already reloaded,
3448 we win as well. */
3449 else if (MEM_P (operand) && address_reloaded[i] == 1)
3450 win = 1;
3451 /* Likewise if the address will be reloaded because
3452 reg_equiv_address is nonzero. For reg_equiv_mem
3453 we have to check. */
3454 else if (REG_P (operand)
3455 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3456 && reg_renumber[REGNO (operand)] < 0
3457 && ((reg_equiv_mem (REGNO (operand)) != 0
3458 && (constraint_satisfied_p
3459 (reg_equiv_mem (REGNO (operand)),
3460 cn)))
3461 || (reg_equiv_address (REGNO (operand))
3462 != 0)))
3463 win = 1;
3465 /* If we didn't already win, we can reload
3466 constants via force_const_mem, and other
3467 MEMs by reloading the address like for 'o'. */
3468 if (CONST_POOL_OK_P (operand_mode[i], operand)
3469 || MEM_P (operand))
3470 badop = 0;
3471 constmemok = 1;
3472 offmemok = 1;
3473 break;
3475 case CT_SPECIAL_MEMORY:
3476 if (force_reload)
3477 break;
3478 if (constraint_satisfied_p (operand, cn))
3479 win = 1;
3480 /* Likewise if the address will be reloaded because
3481 reg_equiv_address is nonzero. For reg_equiv_mem
3482 we have to check. */
3483 else if (REG_P (operand)
3484 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3485 && reg_renumber[REGNO (operand)] < 0
3486 && reg_equiv_mem (REGNO (operand)) != 0
3487 && (constraint_satisfied_p
3488 (reg_equiv_mem (REGNO (operand)), cn)))
3489 win = 1;
3490 break;
3492 case CT_ADDRESS:
3493 if (constraint_satisfied_p (operand, cn))
3494 win = 1;
3496 /* If we didn't already win, we can reload
3497 the address into a base register. */
3498 this_alternative[i]
3499 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3500 ADDRESS, SCRATCH);
3501 badop = 0;
3502 break;
3504 case CT_FIXED_FORM:
3505 if (constraint_satisfied_p (operand, cn))
3506 win = 1;
3507 break;
3509 break;
3511 reg:
3512 this_alternative[i]
3513 = reg_class_subunion[this_alternative[i]][cl];
3514 if (GET_MODE (operand) == BLKmode)
3515 break;
3516 winreg = 1;
3517 if (REG_P (operand)
3518 && reg_fits_class_p (operand, this_alternative[i],
3519 offset, GET_MODE (recog_data.operand[i])))
3520 win = 1;
3521 break;
3523 while ((p += len), c);
3525 if (swapped == (commutative >= 0 ? 1 : 0))
3526 constraints[i] = p;
3528 /* If this operand could be handled with a reg,
3529 and some reg is allowed, then this operand can be handled. */
3530 if (winreg && this_alternative[i] != NO_REGS
3531 && (win || !class_only_fixed_regs[this_alternative[i]]))
3532 badop = 0;
3534 /* Record which operands fit this alternative. */
3535 this_alternative_earlyclobber[i] = earlyclobber;
3536 if (win && ! force_reload)
3537 this_alternative_win[i] = 1;
3538 else if (did_match && ! force_reload)
3539 this_alternative_match_win[i] = 1;
3540 else
3542 int const_to_mem = 0;
3544 this_alternative_offmemok[i] = offmemok;
3545 losers++;
3546 if (badop)
3547 bad = 1;
3548 /* Alternative loses if it has no regs for a reg operand. */
3549 if (REG_P (operand)
3550 && this_alternative[i] == NO_REGS
3551 && this_alternative_matches[i] < 0)
3552 bad = 1;
3554 /* If this is a constant that is reloaded into the desired
3555 class by copying it to memory first, count that as another
3556 reload. This is consistent with other code and is
3557 required to avoid choosing another alternative when
3558 the constant is moved into memory by this function on
3559 an early reload pass. Note that the test here is
3560 precisely the same as in the code below that calls
3561 force_const_mem. */
3562 if (CONST_POOL_OK_P (operand_mode[i], operand)
3563 && ((targetm.preferred_reload_class (operand,
3564 this_alternative[i])
3565 == NO_REGS)
3566 || no_input_reloads))
3568 const_to_mem = 1;
3569 if (this_alternative[i] != NO_REGS)
3570 losers++;
3573 /* Alternative loses if it requires a type of reload not
3574 permitted for this insn. We can always reload SCRATCH
3575 and objects with a REG_UNUSED note. */
3576 if (GET_CODE (operand) != SCRATCH
3577 && modified[i] != RELOAD_READ && no_output_reloads
3578 && ! find_reg_note (insn, REG_UNUSED, operand))
3579 bad = 1;
3580 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3581 && ! const_to_mem)
3582 bad = 1;
3584 /* If we can't reload this value at all, reject this
3585 alternative. Note that we could also lose due to
3586 LIMIT_RELOAD_CLASS, but we don't check that
3587 here. */
3589 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3591 if (targetm.preferred_reload_class (operand,
3592 this_alternative[i])
3593 == NO_REGS)
3594 reject = 600;
3596 if (operand_type[i] == RELOAD_FOR_OUTPUT
3597 && (targetm.preferred_output_reload_class (operand,
3598 this_alternative[i])
3599 == NO_REGS))
3600 reject = 600;
3603 /* We prefer to reload pseudos over reloading other things,
3604 since such reloads may be able to be eliminated later.
3605 If we are reloading a SCRATCH, we won't be generating any
3606 insns, just using a register, so it is also preferred.
3607 So bump REJECT in other cases. Don't do this in the
3608 case where we are forcing a constant into memory and
3609 it will then win, since we don't want a different
3610 alternative to match in that case. */
3611 if (! (REG_P (operand)
3612 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3613 && GET_CODE (operand) != SCRATCH
3614 && ! (const_to_mem && constmemok))
3615 reject += 2;
3617 /* Input reloads can be inherited more often than output
3618 reloads can be removed, so penalize output reloads. */
3619 if (operand_type[i] != RELOAD_FOR_INPUT
3620 && GET_CODE (operand) != SCRATCH)
3621 reject++;
3624 /* If this operand is a pseudo register that didn't get
3625 a hard reg and this alternative accepts some
3626 register, see if the class that we want is a subset
3627 of the preferred class for this register. If not,
3628 but it intersects that class, use the preferred class
3629 instead. If it does not intersect the preferred
3630 class, show that usage of this alternative should be
3631 discouraged; it will be discouraged more still if the
3632 register is `preferred or nothing'. We do this
3633 because it increases the chance of reusing our spill
3634 register in a later insn and avoiding a pair of
3635 memory stores and loads.
3637 Don't bother with this if this alternative will
3638 accept this operand.
3640 Don't do this for a multiword operand, since it is
3641 only a small win and has the risk of requiring more
3642 spill registers, which could cause a large loss.
3644 Don't do this if the preferred class has only one
3645 register because we might otherwise exhaust the
3646 class. */
3648 if (! win && ! did_match
3649 && this_alternative[i] != NO_REGS
3650 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3651 && reg_class_size [(int) preferred_class[i]] > 0
3652 && ! small_register_class_p (preferred_class[i]))
3654 if (! reg_class_subset_p (this_alternative[i],
3655 preferred_class[i]))
3657 /* Since we don't have a way of forming the intersection,
3658 we just do something special if the preferred class
3659 is a subset of the class we have; that's the most
3660 common case anyway. */
3661 if (reg_class_subset_p (preferred_class[i],
3662 this_alternative[i]))
3663 this_alternative[i] = preferred_class[i];
3664 else
3665 reject += (2 + 2 * pref_or_nothing[i]);
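	      /* E.g. when neither class is a subset of the other, REJECT
	         grows by 2, or by 4 if the register is "preferred or
	         nothing", making this alternative a bit less attractive.  */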
3670 /* Now see if any output operands that are marked "earlyclobber"
3671 in this alternative conflict with any input operands
3672 or any memory addresses. */
3674 for (i = 0; i < noperands; i++)
3675 if (this_alternative_earlyclobber[i]
3676 && (this_alternative_win[i] || this_alternative_match_win[i]))
3678 struct decomposition early_data;
3680 early_data = decompose (recog_data.operand[i]);
3682 gcc_assert (modified[i] != RELOAD_READ);
3684 if (this_alternative[i] == NO_REGS)
3686 this_alternative_earlyclobber[i] = 0;
3687 gcc_assert (this_insn_is_asm);
3688 error_for_asm (this_insn,
3689 "%<&%> constraint used with no register class");
3692 for (j = 0; j < noperands; j++)
3693 /* Is this an input operand or a memory ref? */
3694 if ((MEM_P (recog_data.operand[j])
3695 || modified[j] != RELOAD_WRITE)
3696 && j != i
3697 /* Ignore things like match_operator operands. */
3698 && !recog_data.is_operator[j]
3699 /* Don't count an input operand that is constrained to match
3700 the early clobber operand. */
3701 && ! (this_alternative_matches[j] == i
3702 && rtx_equal_p (recog_data.operand[i],
3703 recog_data.operand[j]))
3704 /* Is it altered by storing the earlyclobber operand? */
3705 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3706 early_data))
3708 /* If the output is in a non-empty few-regs class,
3709 it's costly to reload it, so reload the input instead. */
3710 if (small_register_class_p (this_alternative[i])
3711 && (REG_P (recog_data.operand[j])
3712 || GET_CODE (recog_data.operand[j]) == SUBREG))
3714 losers++;
3715 this_alternative_win[j] = 0;
3716 this_alternative_match_win[j] = 0;
3718 else
3719 break;
3721 /* If an earlyclobber operand conflicts with something,
3722 it must be reloaded, so request this and count the cost. */
3723 if (j != noperands)
3725 losers++;
3726 this_alternative_win[i] = 0;
3727 this_alternative_match_win[j] = 0;
3728 for (j = 0; j < noperands; j++)
3729 if (this_alternative_matches[j] == i
3730 && this_alternative_match_win[j])
3732 this_alternative_win[j] = 0;
3733 this_alternative_match_win[j] = 0;
3734 losers++;
3739 /* If one alternative accepts all the operands, no reload required,
3740 choose that alternative; don't consider the remaining ones. */
3741 if (losers == 0)
3743 /* Unswap these so that they are never swapped at `finish'. */
3744 if (swapped)
3746 recog_data.operand[commutative] = substed_operand[commutative];
3747 recog_data.operand[commutative + 1]
3748 = substed_operand[commutative + 1];
3750 for (i = 0; i < noperands; i++)
3752 goal_alternative_win[i] = this_alternative_win[i];
3753 goal_alternative_match_win[i] = this_alternative_match_win[i];
3754 goal_alternative[i] = this_alternative[i];
3755 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3756 goal_alternative_matches[i] = this_alternative_matches[i];
3757 goal_alternative_earlyclobber[i]
3758 = this_alternative_earlyclobber[i];
3760 goal_alternative_number = this_alternative_number;
3761 goal_alternative_swapped = swapped;
3762 goal_earlyclobber = this_earlyclobber;
3763 goto finish;
3766 /* REJECT, set by the ! and ? constraint characters and when a register
3767 would be reloaded into a non-preferred class, discourages the use of
3768 this alternative for a reload goal. REJECT is incremented by six
3769 for each ? and two for each non-preferred class. */
3770 losers = losers * 6 + reject;
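  /* So, for instance, an alternative needing two reloads and carrying a
     single '?' (reject == 6) scores 2 * 6 + 6 == 18; the alternative with
     the smallest score is chosen below.  */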
3772 /* If this alternative can be made to work by reloading,
3773 and it needs less reloading than the others checked so far,
3774 record it as the chosen goal for reloading. */
3775 if (! bad)
3777 if (best > losers)
3779 for (i = 0; i < noperands; i++)
3781 goal_alternative[i] = this_alternative[i];
3782 goal_alternative_win[i] = this_alternative_win[i];
3783 goal_alternative_match_win[i]
3784 = this_alternative_match_win[i];
3785 goal_alternative_offmemok[i]
3786 = this_alternative_offmemok[i];
3787 goal_alternative_matches[i] = this_alternative_matches[i];
3788 goal_alternative_earlyclobber[i]
3789 = this_alternative_earlyclobber[i];
3791 goal_alternative_swapped = swapped;
3792 best = losers;
3793 goal_alternative_number = this_alternative_number;
3794 goal_earlyclobber = this_earlyclobber;
3798 if (swapped)
3800 /* If the commutative operands have been swapped, swap
3801 them back in order to check the next alternative. */
3802 recog_data.operand[commutative] = substed_operand[commutative];
3803 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3804 /* Unswap the duplicates too. */
3805 for (i = 0; i < recog_data.n_dups; i++)
3806 if (recog_data.dup_num[i] == commutative
3807 || recog_data.dup_num[i] == commutative + 1)
3808 *recog_data.dup_loc[i]
3809 = recog_data.operand[(int) recog_data.dup_num[i]];
3811 /* Unswap the operand related information as well. */
3812 std::swap (preferred_class[commutative],
3813 preferred_class[commutative + 1]);
3814 std::swap (pref_or_nothing[commutative],
3815 pref_or_nothing[commutative + 1]);
3816 std::swap (address_reloaded[commutative],
3817 address_reloaded[commutative + 1]);
3822 /* The operands don't meet the constraints.
3823 goal_alternative describes the alternative
3824 that we could reach by reloading the fewest operands.
3825 Reload so as to fit it. */
3827 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3829 /* No alternative works with reloads?? */
3830 if (insn_code_number >= 0)
3831 fatal_insn ("unable to generate reloads for:", insn);
3832 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3833 /* Avoid further trouble with this insn. */
3834 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3835 n_reloads = 0;
3836 return 0;
3839 /* Jump to `finish' from above if all operands are valid already.
3840 In that case, goal_alternative_win is all 1. */
3841 finish:
3843 /* Right now, for any pair of operands I and J that are required to match,
3844 with I < J,
3845 goal_alternative_matches[J] is I.
3846 Set up goal_alternative_matched as the inverse function:
3847 goal_alternative_matched[I] = J. */
3849 for (i = 0; i < noperands; i++)
3850 goal_alternative_matched[i] = -1;
3852 for (i = 0; i < noperands; i++)
3853 if (! goal_alternative_win[i]
3854 && goal_alternative_matches[i] >= 0)
3855 goal_alternative_matched[goal_alternative_matches[i]] = i;
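  /* For instance, if operand 2 had to match operand 0 and operand 2 still
     needs a reload, goal_alternative_matches[2] is 0 and we record
     goal_alternative_matched[0] = 2 here.  */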
3857 for (i = 0; i < noperands; i++)
3858 goal_alternative_win[i] |= goal_alternative_match_win[i];
3860 /* If the best alternative is with operands 1 and 2 swapped,
3861 consider them swapped before reporting the reloads. Update the
3862 operand numbers of any reloads already pushed. */
3864 if (goal_alternative_swapped)
3866 std::swap (substed_operand[commutative],
3867 substed_operand[commutative + 1]);
3868 std::swap (recog_data.operand[commutative],
3869 recog_data.operand[commutative + 1]);
3870 std::swap (*recog_data.operand_loc[commutative],
3871 *recog_data.operand_loc[commutative + 1]);
3873 for (i = 0; i < recog_data.n_dups; i++)
3874 if (recog_data.dup_num[i] == commutative
3875 || recog_data.dup_num[i] == commutative + 1)
3876 *recog_data.dup_loc[i]
3877 = recog_data.operand[(int) recog_data.dup_num[i]];
3879 for (i = 0; i < n_reloads; i++)
3881 if (rld[i].opnum == commutative)
3882 rld[i].opnum = commutative + 1;
3883 else if (rld[i].opnum == commutative + 1)
3884 rld[i].opnum = commutative;
3888 for (i = 0; i < noperands; i++)
3890 operand_reloadnum[i] = -1;
3892 /* If this is an earlyclobber operand, we need to widen the scope.
3893 The reload must remain valid from the start of the insn being
3894 reloaded until after the operand is stored into its destination.
3895 We approximate this with RELOAD_OTHER even though we know that we
3896 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3898 One special case that is worth checking is when we have an
3899 output that is earlyclobber but isn't used past the insn (typically
3900 a SCRATCH). In this case the reload need only be live
3901 through the insn itself, not through any of our input or output
3902 reloads.
3903 But we must not accidentally narrow the scope of an existing
3904 RELOAD_OTHER reload - leave these alone.
3906 In any case, anything needed to address this operand can remain
3907 as it was previously categorized. */
3909 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3910 operand_type[i]
3911 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3912 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3915 /* Any constants that aren't allowed and can't be reloaded
3916 into registers are here changed into memory references. */
3917 for (i = 0; i < noperands; i++)
3918 if (! goal_alternative_win[i])
3920 rtx op = recog_data.operand[i];
3921 rtx subreg = NULL_RTX;
3922 rtx plus = NULL_RTX;
3923 machine_mode mode = operand_mode[i];
3925 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3926 push_reload so we have to let them pass here. */
3927 if (GET_CODE (op) == SUBREG)
3929 subreg = op;
3930 op = SUBREG_REG (op);
3931 mode = GET_MODE (op);
3934 if (GET_CODE (op) == PLUS)
3936 plus = op;
3937 op = XEXP (op, 1);
3940 if (CONST_POOL_OK_P (mode, op)
3941 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3942 == NO_REGS)
3943 || no_input_reloads))
3945 int this_address_reloaded;
3946 rtx tem = force_const_mem (mode, op);
3948 /* If we stripped a SUBREG or a PLUS above add it back. */
3949 if (plus != NULL_RTX)
3950 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3952 if (subreg != NULL_RTX)
3953 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3955 this_address_reloaded = 0;
3956 substed_operand[i] = recog_data.operand[i]
3957 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3958 0, insn, &this_address_reloaded);
3960 /* If the alternative accepts constant pool refs directly
3961 there will be no reload needed at all. */
3962 if (plus == NULL_RTX
3963 && subreg == NULL_RTX
3964 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3965 ? substed_operand[i]
3966 : NULL,
3967 recog_data.constraints[i],
3968 goal_alternative_number))
3969 goal_alternative_win[i] = 1;
3973 /* Record the values of the earlyclobber operands for the caller. */
3974 if (goal_earlyclobber)
3975 for (i = 0; i < noperands; i++)
3976 if (goal_alternative_earlyclobber[i])
3977 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3979 /* Now record reloads for all the operands that need them. */
3980 for (i = 0; i < noperands; i++)
3981 if (! goal_alternative_win[i])
3983 /* Operands that match previous ones have already been handled. */
3984 if (goal_alternative_matches[i] >= 0)
3986 /* Handle an operand with a nonoffsettable address
3987 appearing where an offsettable address will do
3988 by reloading the address into a base register.
3990 ??? We can also do this when the operand is a register and
3991 reg_equiv_mem is not offsettable, but this is a bit tricky,
3992 so we don't bother with it. It may not be worth doing. */
3993 else if (goal_alternative_matched[i] == -1
3994 && goal_alternative_offmemok[i]
3995 && MEM_P (recog_data.operand[i]))
3997 /* If the address to be reloaded is a VOIDmode constant,
3998 use the default address mode as the mode of the reload register,
3999 as would have been done by find_reloads_address. */
4000 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4001 machine_mode address_mode;
4003 address_mode = get_address_mode (recog_data.operand[i]);
4004 operand_reloadnum[i]
4005 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4006 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4007 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4008 address_mode,
4009 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4010 rld[operand_reloadnum[i]].inc
4011 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4013 /* If this operand is an output, we will have made any
4014 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4015 now we are treating part of the operand as an input, so
4016 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4018 if (modified[i] == RELOAD_WRITE)
4020 for (j = 0; j < n_reloads; j++)
4022 if (rld[j].opnum == i)
4024 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4025 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4026 else if (rld[j].when_needed
4027 == RELOAD_FOR_OUTADDR_ADDRESS)
4028 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4033 else if (goal_alternative_matched[i] == -1)
4035 operand_reloadnum[i]
4036 = push_reload ((modified[i] != RELOAD_WRITE
4037 ? recog_data.operand[i] : 0),
4038 (modified[i] != RELOAD_READ
4039 ? recog_data.operand[i] : 0),
4040 (modified[i] != RELOAD_WRITE
4041 ? recog_data.operand_loc[i] : 0),
4042 (modified[i] != RELOAD_READ
4043 ? recog_data.operand_loc[i] : 0),
4044 (enum reg_class) goal_alternative[i],
4045 (modified[i] == RELOAD_WRITE
4046 ? VOIDmode : operand_mode[i]),
4047 (modified[i] == RELOAD_READ
4048 ? VOIDmode : operand_mode[i]),
4049 (insn_code_number < 0 ? 0
4050 : insn_data[insn_code_number].operand[i].strict_low),
4051 0, i, operand_type[i]);
4053 /* In a matching pair of operands, one must be input only
4054 and the other must be output only.
4055 Pass the input operand as IN and the other as OUT. */
4056 else if (modified[i] == RELOAD_READ
4057 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4059 operand_reloadnum[i]
4060 = push_reload (recog_data.operand[i],
4061 recog_data.operand[goal_alternative_matched[i]],
4062 recog_data.operand_loc[i],
4063 recog_data.operand_loc[goal_alternative_matched[i]],
4064 (enum reg_class) goal_alternative[i],
4065 operand_mode[i],
4066 operand_mode[goal_alternative_matched[i]],
4067 0, 0, i, RELOAD_OTHER);
4068 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4070 else if (modified[i] == RELOAD_WRITE
4071 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4073 operand_reloadnum[goal_alternative_matched[i]]
4074 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4075 recog_data.operand[i],
4076 recog_data.operand_loc[goal_alternative_matched[i]],
4077 recog_data.operand_loc[i],
4078 (enum reg_class) goal_alternative[i],
4079 operand_mode[goal_alternative_matched[i]],
4080 operand_mode[i],
4081 0, 0, i, RELOAD_OTHER);
4082 operand_reloadnum[i] = output_reloadnum;
4084 else
4086 gcc_assert (insn_code_number < 0);
4087 error_for_asm (insn, "inconsistent operand constraints "
4088 "in an %<asm%>");
4089 /* Avoid further trouble with this insn. */
4090 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4091 n_reloads = 0;
4092 return 0;
4095 else if (goal_alternative_matched[i] < 0
4096 && goal_alternative_matches[i] < 0
4097 && address_operand_reloaded[i] != 1
4098 && optimize)
4100 /* For each non-matching operand that's a MEM or a pseudo-register
4101 that didn't get a hard register, make an optional reload.
4102 This may get done even if the insn needs no reloads otherwise. */
4104 rtx operand = recog_data.operand[i];
4106 while (GET_CODE (operand) == SUBREG)
4107 operand = SUBREG_REG (operand);
4108 if ((MEM_P (operand)
4109 || (REG_P (operand)
4110 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4111 /* If this is only for an output, the optional reload would not
4112 actually cause us to use a register now, just note that
4113 something is stored here. */
4114 && (goal_alternative[i] != NO_REGS
4115 || modified[i] == RELOAD_WRITE)
4116 && ! no_input_reloads
4117 /* An optional output reload might allow us to delete INSN later.
4118 We mustn't make in-out reloads for insns that are not permitted
4119 to have output reloads.
4120 If this is an asm, we can't delete it; we must not even call
4121 push_reload for an optional output reload in this case,
4122 because we can't be sure that the constraint allows a register,
4123 and push_reload verifies the constraints for asms. */
4124 && (modified[i] == RELOAD_READ
4125 || (! no_output_reloads && ! this_insn_is_asm)))
4126 operand_reloadnum[i]
4127 = push_reload ((modified[i] != RELOAD_WRITE
4128 ? recog_data.operand[i] : 0),
4129 (modified[i] != RELOAD_READ
4130 ? recog_data.operand[i] : 0),
4131 (modified[i] != RELOAD_WRITE
4132 ? recog_data.operand_loc[i] : 0),
4133 (modified[i] != RELOAD_READ
4134 ? recog_data.operand_loc[i] : 0),
4135 (enum reg_class) goal_alternative[i],
4136 (modified[i] == RELOAD_WRITE
4137 ? VOIDmode : operand_mode[i]),
4138 (modified[i] == RELOAD_READ
4139 ? VOIDmode : operand_mode[i]),
4140 (insn_code_number < 0 ? 0
4141 : insn_data[insn_code_number].operand[i].strict_low),
4142 1, i, operand_type[i]);
4143 /* If a memory reference remains (either as a MEM or a pseudo that
4144 did not get a hard register), yet we can't make an optional
4145 reload, check if this is actually a pseudo register reference;
4146 we then need to emit a USE and/or a CLOBBER so that reload
4147 inheritance will do the right thing. */
4148 else if (replace
4149 && (MEM_P (operand)
4150 || (REG_P (operand)
4151 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4152 && reg_renumber [REGNO (operand)] < 0)))
4154 operand = *recog_data.operand_loc[i];
4156 while (GET_CODE (operand) == SUBREG)
4157 operand = SUBREG_REG (operand);
4158 if (REG_P (operand))
4160 if (modified[i] != RELOAD_WRITE)
4161 /* We mark the USE with QImode so that we recognize
4162 it as one that can be safely deleted at the end
4163 of reload. */
4164 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4165 insn), QImode);
4166 if (modified[i] != RELOAD_READ)
4167 emit_insn_after (gen_clobber (operand), insn);
4171 else if (goal_alternative_matches[i] >= 0
4172 && goal_alternative_win[goal_alternative_matches[i]]
4173 && modified[i] == RELOAD_READ
4174 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4175 && ! no_input_reloads && ! no_output_reloads
4176 && optimize)
4178 /* Similarly, make an optional reload for a pair of matching
4179 objects that are in MEM or a pseudo that didn't get a hard reg. */
4181 rtx operand = recog_data.operand[i];
4183 while (GET_CODE (operand) == SUBREG)
4184 operand = SUBREG_REG (operand);
4185 if ((MEM_P (operand)
4186 || (REG_P (operand)
4187 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4188 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4189 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4190 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4191 recog_data.operand[i],
4192 recog_data.operand_loc[goal_alternative_matches[i]],
4193 recog_data.operand_loc[i],
4194 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4195 operand_mode[goal_alternative_matches[i]],
4196 operand_mode[i],
4197 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4200 /* Perform whatever substitutions on the operands we are supposed
4201 to make due to commutativity or replacement of registers
4202 with equivalent constants or memory slots. */
4204 for (i = 0; i < noperands; i++)
4206 /* We only do this on the last pass through reload, because it is
4207 possible for some data (like reg_equiv_address) to be changed during
4208 later passes. Moreover, we lose the opportunity to get a useful
4209 reload_{in,out}_reg when we do these replacements. */
4211 if (replace)
4213 rtx substitution = substed_operand[i];
4215 *recog_data.operand_loc[i] = substitution;
4217 /* If we're replacing an operand with a LABEL_REF, we need to
4218 make sure that there's a REG_LABEL_OPERAND note attached to
4219 this instruction. */
4220 if (GET_CODE (substitution) == LABEL_REF
4221 && !find_reg_note (insn, REG_LABEL_OPERAND,
4222 LABEL_REF_LABEL (substitution))
4223 /* For a JUMP_P, if it was a branch target it must have
4224 already been recorded as such. */
4225 && (!JUMP_P (insn)
4226 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4227 insn)))
4229 add_reg_note (insn, REG_LABEL_OPERAND,
4230 LABEL_REF_LABEL (substitution));
4231 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4232 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4236 else
4237 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4240 /* If this insn pattern contains any MATCH_DUP's, make sure that
4241 they will be substituted if the operands they match are substituted.
4242 Also do now any substitutions we already did on the operands.
4244 Don't do this if we aren't making replacements because we might be
4245 propagating things allocated by frame pointer elimination into places
4246 it doesn't expect. */
4248 if (insn_code_number >= 0 && replace)
4249 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4251 int opno = recog_data.dup_num[i];
4252 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4253 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4256 #if 0
4257 /* This loses because reloading of prior insns can invalidate the equivalence
4258 (or at least find_equiv_reg isn't smart enough to find it any more),
4259 causing this insn to need more reload regs than it needed before.
4260 It may be too late to make the reload regs available.
4261 Now this optimization is done safely in choose_reload_regs. */
4263 /* For each reload of a reg into some other class of reg,
4264 search for an existing equivalent reg (same value now) in the right class.
4265 We can use it as long as we don't need to change its contents. */
4266 for (i = 0; i < n_reloads; i++)
4267 if (rld[i].reg_rtx == 0
4268 && rld[i].in != 0
4269 && REG_P (rld[i].in)
4270 && rld[i].out == 0)
4272 rld[i].reg_rtx
4273 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4274 static_reload_reg_p, 0, rld[i].inmode);
4275 /* Prevent generation of insn to load the value
4276 because the one we found already has the value. */
4277 if (rld[i].reg_rtx)
4278 rld[i].in = rld[i].reg_rtx;
4280 #endif
4282 /* If we detected error and replaced asm instruction by USE, forget about the
4283 reloads. */
4284 if (GET_CODE (PATTERN (insn)) == USE
4285 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4286 n_reloads = 0;
4288 /* Perhaps an output reload can be combined with another
4289 to reduce needs by one. */
4290 if (!goal_earlyclobber)
4291 combine_reloads ();
4293 /* If we have a pair of reloads for parts of an address, they are reloading
4294 the same object, the operands themselves were not reloaded, and they
4295 are for two operands that are supposed to match, merge the reloads and
4296 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4298 for (i = 0; i < n_reloads; i++)
4300 int k;
4302 for (j = i + 1; j < n_reloads; j++)
4303 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4304 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4305 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4306 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4307 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4308 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4309 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4310 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4311 && rtx_equal_p (rld[i].in, rld[j].in)
4312 && (operand_reloadnum[rld[i].opnum] < 0
4313 || rld[operand_reloadnum[rld[i].opnum]].optional)
4314 && (operand_reloadnum[rld[j].opnum] < 0
4315 || rld[operand_reloadnum[rld[j].opnum]].optional)
4316 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4317 || (goal_alternative_matches[rld[j].opnum]
4318 == rld[i].opnum)))
4320 for (k = 0; k < n_replacements; k++)
4321 if (replacements[k].what == j)
4322 replacements[k].what = i;
4324 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4325 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4326 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4327 else
4328 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4329 rld[j].in = 0;
4333 /* Scan all the reloads and update their type.
4334 If a reload is for the address of an operand and we didn't reload
4335 that operand, change the type. Similarly, change the operand number
4336 of a reload when two operands match. If a reload is optional, treat it
4337 as though the operand isn't reloaded.
4339 ??? This latter case is somewhat odd because if we do the optional
4340 reload, it means the object is hanging around. Thus we need only
4341 do the address reload if the optional reload was NOT done.
4343 Change secondary reloads to be the address type of their operand, not
4344 the normal type.
4346 If an operand's reload is now RELOAD_OTHER, change any
4347 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4348 RELOAD_FOR_OTHER_ADDRESS. */
4350 for (i = 0; i < n_reloads; i++)
4352 if (rld[i].secondary_p
4353 && rld[i].when_needed == operand_type[rld[i].opnum])
4354 rld[i].when_needed = address_type[rld[i].opnum];
4356 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4357 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4358 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4359 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4360 && (operand_reloadnum[rld[i].opnum] < 0
4361 || rld[operand_reloadnum[rld[i].opnum]].optional))
4363 /* If we have a secondary reload to go along with this reload,
4364 change its type to RELOAD_FOR_OPADDR_ADDR. */
4366 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4367 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4368 && rld[i].secondary_in_reload != -1)
4370 int secondary_in_reload = rld[i].secondary_in_reload;
4372 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4374 /* If there's a tertiary reload we have to change it also. */
4375 if (secondary_in_reload > 0
4376 && rld[secondary_in_reload].secondary_in_reload != -1)
4377 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4378 = RELOAD_FOR_OPADDR_ADDR;
4381 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4382 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4383 && rld[i].secondary_out_reload != -1)
4385 int secondary_out_reload = rld[i].secondary_out_reload;
4387 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4389 /* If there's a tertiary reload we have to change it also. */
4390 if (secondary_out_reload
4391 && rld[secondary_out_reload].secondary_out_reload != -1)
4392 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4393 = RELOAD_FOR_OPADDR_ADDR;
4396 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4397 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4398 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4399 else
4400 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4403 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4404 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4405 && operand_reloadnum[rld[i].opnum] >= 0
4406 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4407 == RELOAD_OTHER))
4408 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4410 if (goal_alternative_matches[rld[i].opnum] >= 0)
4411 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4414 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4415 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4416 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4418 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4419 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4420 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4421 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4422 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4423 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4424 This is complicated by the fact that a single operand can have more
4425 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4426 choose_reload_regs without affecting code quality, and cases that
4427 actually fail are extremely rare, so it turns out to be better to fix
4428 the problem here by not generating cases that choose_reload_regs will
4429 fail for. */
4430 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4431 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4432 a single operand.
4433 We can reduce the register pressure by exploiting that a
4434 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4435 does not conflict with any of them, if it is only used for the first of
4436 the RELOAD_FOR_X_ADDRESS reloads. */
4438 int first_op_addr_num = -2;
4439 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4440 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4441 int need_change = 0;
4442 /* We use first_op_addr_num and the contents of the above arrays
4443 first as flags - -2 means no instance encountered, -1 means exactly
4444 one instance encountered.
4445 If more than one instance has been encountered, we store the reload
4446 number of the first reload of the kind in question; reload numbers
4447 are known to be non-negative. */
4448 for (i = 0; i < noperands; i++)
4449 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4450 for (i = n_reloads - 1; i >= 0; i--)
4452 switch (rld[i].when_needed)
4454 case RELOAD_FOR_OPERAND_ADDRESS:
4455 if (++first_op_addr_num >= 0)
4457 first_op_addr_num = i;
4458 need_change = 1;
4460 break;
4461 case RELOAD_FOR_INPUT_ADDRESS:
4462 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4464 first_inpaddr_num[rld[i].opnum] = i;
4465 need_change = 1;
4467 break;
4468 case RELOAD_FOR_OUTPUT_ADDRESS:
4469 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4471 first_outpaddr_num[rld[i].opnum] = i;
4472 need_change = 1;
4474 break;
4475 default:
4476 break;
4480 if (need_change)
4482 for (i = 0; i < n_reloads; i++)
4484 int first_num;
4485 enum reload_type type;
4487 switch (rld[i].when_needed)
4489 case RELOAD_FOR_OPADDR_ADDR:
4490 first_num = first_op_addr_num;
4491 type = RELOAD_FOR_OPERAND_ADDRESS;
4492 break;
4493 case RELOAD_FOR_INPADDR_ADDRESS:
4494 first_num = first_inpaddr_num[rld[i].opnum];
4495 type = RELOAD_FOR_INPUT_ADDRESS;
4496 break;
4497 case RELOAD_FOR_OUTADDR_ADDRESS:
4498 first_num = first_outpaddr_num[rld[i].opnum];
4499 type = RELOAD_FOR_OUTPUT_ADDRESS;
4500 break;
4501 default:
4502 continue;
4504 if (first_num < 0)
4505 continue;
4506 else if (i > first_num)
4507 rld[i].when_needed = type;
4508 else
4510 /* Check if the only TYPE reload that uses reload I is
4511 reload FIRST_NUM. */
4512 for (j = n_reloads - 1; j > first_num; j--)
4514 if (rld[j].when_needed == type
4515 && (rld[i].secondary_p
4516 ? rld[j].secondary_in_reload == i
4517 : reg_mentioned_p (rld[i].in, rld[j].in)))
4519 rld[i].when_needed = type;
4520 break;
4528 /* See if we have any reloads that are now allowed to be merged
4529 because we've changed when the reload is needed to
4530 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4531 check for the most common cases. */
4533 for (i = 0; i < n_reloads; i++)
4534 if (rld[i].in != 0 && rld[i].out == 0
4535 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4536 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4537 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4538 for (j = 0; j < n_reloads; j++)
4539 if (i != j && rld[j].in != 0 && rld[j].out == 0
4540 && rld[j].when_needed == rld[i].when_needed
4541 && MATCHES (rld[i].in, rld[j].in)
4542 && rld[i].rclass == rld[j].rclass
4543 && !rld[i].nocombine && !rld[j].nocombine
4544 && rld[i].reg_rtx == rld[j].reg_rtx)
4546 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4547 transfer_replacements (i, j);
4548 rld[j].in = 0;
4551 /* If we made any reloads for addresses, see if they violate a
4552 "no input reloads" requirement for this insn. But loads that we
4553 do after the insn (such as for output addresses) are fine. */
4554 if (HAVE_cc0 && no_input_reloads)
4555 for (i = 0; i < n_reloads; i++)
4556 gcc_assert (rld[i].in == 0
4557 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4558 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4560 /* Compute reload_mode and reload_nregs. */
4561 for (i = 0; i < n_reloads; i++)
4563 rld[i].mode
4564 = (rld[i].inmode == VOIDmode
4565 || (GET_MODE_SIZE (rld[i].outmode)
4566 > GET_MODE_SIZE (rld[i].inmode)))
4567 ? rld[i].outmode : rld[i].inmode;
4569 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
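      /* E.g. an in-out reload with inmode SImode and outmode DImode gets
	 mode DImode (assuming DImode is the wider of the two), and nregs
	 is the number of hard regs of its class that a DImode value
	 occupies on this target.  */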
4572 /* Special case a simple move with an input reload and a
4573 destination of a hard reg, if the hard reg is ok, use it. */
4574 for (i = 0; i < n_reloads; i++)
4575 if (rld[i].when_needed == RELOAD_FOR_INPUT
4576 && GET_CODE (PATTERN (insn)) == SET
4577 && REG_P (SET_DEST (PATTERN (insn)))
4578 && (SET_SRC (PATTERN (insn)) == rld[i].in
4579 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4580 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4582 rtx dest = SET_DEST (PATTERN (insn));
4583 unsigned int regno = REGNO (dest);
4585 if (regno < FIRST_PSEUDO_REGISTER
4586 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4587 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4589 int nr = hard_regno_nregs[regno][rld[i].mode];
4590 int ok = 1, nri;
4592 for (nri = 1; nri < nr; nri ++)
4593 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4595 ok = 0;
4596 break;
4599 if (ok)
4600 rld[i].reg_rtx = dest;
4604 return retval;
4607 /* Return true if alternative number ALTNUM in constraint-string
4608 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4609 MEM gives the reference if its address hasn't been fully reloaded,
4610 otherwise it is NULL. */
4612 static bool
4613 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4614 const char *constraint, int altnum)
4616 int c;
4618 /* Skip alternatives before the one requested. */
4619 while (altnum > 0)
4621 while (*constraint++ != ',')
4623 altnum--;
4625 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4626 If one of them is present, this alternative accepts the result of
4627 passing a constant-pool reference through find_reloads_toplev.
4629 The same is true of extra memory constraints if the address
4630 was reloaded into a register. However, the target may elect
4631 to disallow the original constant address, forcing it to be
4632 reloaded into a register instead. */
4633 for (; (c = *constraint) && c != ',' && c != '#';
4634 constraint += CONSTRAINT_LEN (c, constraint))
4636 enum constraint_num cn = lookup_constraint (constraint);
4637 if (insn_extra_memory_constraint (cn)
4638 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4639 return true;
4641 return false;
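/* For example, with CONSTRAINT "r,m" and ALTNUM == 1, the loop skips past
   the first comma and then sees 'm'; assuming 'm' is a memory constraint on
   this target and MEM is NULL or satisfies it, the result is true. With
   ALTNUM == 0 only 'r' is scanned and the result is false.  */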
4644 /* Scan X for memory references and scan the addresses for reloading.
4645 Also checks for references to "constant" regs that we want to eliminate
4646 and replaces them with the values they stand for.
4647 We may alter X destructively if it contains a reference to such.
4648 If X is just a constant reg, we return the equivalent value
4649 instead of X.
4651 IND_LEVELS says how many levels of indirect addressing this machine
4652 supports.
4654 OPNUM and TYPE identify the purpose of the reload.
4656 IS_SET_DEST is true if X is the destination of a SET, which is not
4657 appropriate to be replaced by a constant.
4659 INSN, if nonzero, is the insn in which we do the reload. It is used
4660 to determine if we may generate output reloads, and where to put USEs
4661 for pseudos that we have to replace with stack slots.
4663 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4664 result of find_reloads_address. */
4666 static rtx
4667 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4668 int ind_levels, int is_set_dest, rtx_insn *insn,
4669 int *address_reloaded)
4671 RTX_CODE code = GET_CODE (x);
4673 const char *fmt = GET_RTX_FORMAT (code);
4674 int i;
4675 int copied;
4677 if (code == REG)
4679 /* This code is duplicated for speed in find_reloads. */
4680 int regno = REGNO (x);
4681 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4682 x = reg_equiv_constant (regno);
4683 #if 0
4684 /* This creates (subreg (mem...)) which would cause an unnecessary
4685 reload of the mem. */
4686 else if (reg_equiv_mem (regno) != 0)
4687 x = reg_equiv_mem (regno);
4688 #endif
4689 else if (reg_equiv_memory_loc (regno)
4690 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4692 rtx mem = make_memloc (x, regno);
4693 if (reg_equiv_address (regno)
4694 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4696 /* If this is not a toplevel operand, find_reloads doesn't see
4697 this substitution. We have to emit a USE of the pseudo so
4698 that delete_output_reload can see it. */
4699 if (replace_reloads && recog_data.operand[opnum] != x)
4700 /* We mark the USE with QImode so that we recognize it
4701 as one that can be safely deleted at the end of
4702 reload. */
4703 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4704 QImode);
4705 x = mem;
4706 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4707 opnum, type, ind_levels, insn);
4708 if (!rtx_equal_p (x, mem))
4709 push_reg_equiv_alt_mem (regno, x);
4710 if (address_reloaded)
4711 *address_reloaded = i;
4714 return x;
4716 if (code == MEM)
4718 rtx tem = x;
4720 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4721 opnum, type, ind_levels, insn);
4722 if (address_reloaded)
4723 *address_reloaded = i;
4725 return tem;
4728 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4730 /* Check for SUBREG containing a REG that's equivalent to a
4731 constant. If the constant has a known value, truncate it
4732 right now. Similarly if we are extracting a single-word of a
4733 multi-word constant. If the constant is symbolic, allow it
4734 to be substituted normally. push_reload will strip the
4735 subreg later. The constant must not be VOIDmode, because we
4736 will lose the mode of the register (this should never happen
4737 because one of the cases above should handle it). */
4739 int regno = REGNO (SUBREG_REG (x));
4740 rtx tem;
4742 if (regno >= FIRST_PSEUDO_REGISTER
4743 && reg_renumber[regno] < 0
4744 && reg_equiv_constant (regno) != 0)
4746 tem =
4747 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4748 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4749 gcc_assert (tem);
4750 if (CONSTANT_P (tem)
4751 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4753 tem = force_const_mem (GET_MODE (x), tem);
4754 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4755 &XEXP (tem, 0), opnum, type,
4756 ind_levels, insn);
4757 if (address_reloaded)
4758 *address_reloaded = i;
4760 return tem;
4763 /* If the subreg contains a reg that will be converted to a mem,
4764 attempt to convert the whole subreg to a (narrower or wider)
4765 memory reference instead. If this succeeds, we're done --
4766 otherwise fall through to check whether the inner reg still
4767 needs address reloads anyway. */
4769 if (regno >= FIRST_PSEUDO_REGISTER
4770 && reg_equiv_memory_loc (regno) != 0)
4772 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4773 insn, address_reloaded);
4774 if (tem)
4775 return tem;
4779 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4781 if (fmt[i] == 'e')
4783 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4784 ind_levels, is_set_dest, insn,
4785 address_reloaded);
4786 /* If we have replaced a reg with its equivalent memory loc -
4787 that can still be handled here e.g. if it's in a paradoxical
4788 subreg - we must make the change in a copy, rather than using
4789 a destructive change. This way, find_reloads can still elect
4790 not to do the change. */
4791 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4793 x = shallow_copy_rtx (x);
4794 copied = 1;
4796 XEXP (x, i) = new_part;
4799 return x;
4802 /* Return a mem ref for the memory equivalent of reg REGNO.
4803 This mem ref is not shared with anything. */
4805 static rtx
4806 make_memloc (rtx ad, int regno)
4808 /* We must rerun eliminate_regs, in case the elimination
4809 offsets have changed. */
4810 rtx tem
4811 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4814 /* If TEM might contain a pseudo, we must copy it to avoid
4815 modifying it when we do the substitution for the reload. */
4816 if (rtx_varies_p (tem, 0))
4817 tem = copy_rtx (tem);
4819 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4820 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4822 /* Copy the result if it's still the same as the equivalence, to avoid
4823 modifying it when we do the substitution for the reload. */
4824 if (tem == reg_equiv_memory_loc (regno))
4825 tem = copy_rtx (tem);
4826 return tem;
4829 /* Returns true if AD could be turned into a valid memory reference
4830 to mode MODE in address space AS by reloading the part pointed to
4831 by PART into a register. */
4833 static int
4834 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4835 addr_space_t as, rtx *part)
4837 int retv;
4838 rtx tem = *part;
4839 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4841 *part = reg;
4842 retv = memory_address_addr_space_p (mode, ad, as);
4843 *part = tem;
4845 return retv;
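/* For example, if AD is (plus X Y) and PART points at X, the code above
   temporarily replaces X with a fresh pseudo of the same mode and asks
   whether (plus (reg NEW) Y) would be a valid address; a true result tells
   the caller that reloading X by itself is enough.  */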
4848 /* Record all reloads needed for handling memory address AD
4849 which appears in *LOC in a memory reference to mode MODE
4850 which itself is found in location *MEMREFLOC.
4851 Note that we take shortcuts assuming that no multi-reg machine mode
4852 occurs as part of an address.
4854 OPNUM and TYPE specify the purpose of this reload.
4856 IND_LEVELS says how many levels of indirect addressing this machine
4857 supports.
4859 INSN, if nonzero, is the insn in which we do the reload. It is used
4860 to determine if we may generate output reloads, and where to put USEs
4861 for pseudos that we have to replace with stack slots.
4863 Value is one if this address is reloaded or replaced as a whole; it is
4864 zero if the top level of this address was not reloaded or replaced, and
4865 it is -1 if it may or may not have been reloaded or replaced.
4867 Note that there is no verification that the address will be valid after
4868 this routine does its work. Instead, we rely on the fact that the address
4869 was valid when reload started. So we need only undo things that reload
4870 could have broken. These are wrong register types, pseudos not allocated
4871 to a hard register, and frame pointer elimination. */
4873 static int
4874 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4875 rtx *loc, int opnum, enum reload_type type,
4876 int ind_levels, rtx_insn *insn)
4878 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4879 : ADDR_SPACE_GENERIC;
4880 int regno;
4881 int removed_and = 0;
4882 int op_index;
4883 rtx tem;
4885 /* If the address is a register, see if it is a legitimate address and
4886 reload if not. We first handle the cases where we need not reload
4887 or where we must reload in a non-standard way. */
4889 if (REG_P (ad))
4891 regno = REGNO (ad);
4893 if (reg_equiv_constant (regno) != 0)
4895 find_reloads_address_part (reg_equiv_constant (regno), loc,
4896 base_reg_class (mode, as, MEM, SCRATCH),
4897 GET_MODE (ad), opnum, type, ind_levels);
4898 return 1;
4901 tem = reg_equiv_memory_loc (regno);
4902 if (tem != 0)
4904 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4906 tem = make_memloc (ad, regno);
4907 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4908 XEXP (tem, 0),
4909 MEM_ADDR_SPACE (tem)))
4911 rtx orig = tem;
4913 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4914 &XEXP (tem, 0), opnum,
4915 ADDR_TYPE (type), ind_levels, insn);
4916 if (!rtx_equal_p (tem, orig))
4917 push_reg_equiv_alt_mem (regno, tem);
4919 /* We can avoid a reload if the register's equivalent memory
4920 expression is valid as an indirect memory address.
4921 But not all addresses are valid in a mem used as an indirect
4922 address: only reg or reg+constant. */
4924 if (ind_levels > 0
4925 && strict_memory_address_addr_space_p (mode, tem, as)
4926 && (REG_P (XEXP (tem, 0))
4927 || (GET_CODE (XEXP (tem, 0)) == PLUS
4928 && REG_P (XEXP (XEXP (tem, 0), 0))
4929 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4931 /* If TEM is not the same as what we'll be replacing the
4932 pseudo with after reload, put a USE in front of INSN
4933 in the final reload pass. */
4934 if (replace_reloads
4935 && num_not_at_initial_offset
4936 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4938 *loc = tem;
4939 /* We mark the USE with QImode so that we
4940 recognize it as one that can be safely
4941 deleted at the end of reload. */
4942 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4943 insn), QImode);
4945 /* This doesn't really count as replacing the address
4946 as a whole, since it is still a memory access. */
4948 return 0;
4950 ad = tem;
4954 /* The only remaining case where we can avoid a reload is if this is a
4955 hard register that is valid as a base register and which is not the
4956 subject of a CLOBBER in this insn. */
4958 else if (regno < FIRST_PSEUDO_REGISTER
4959 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4960 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4961 return 0;
4963 /* If we do not have one of the cases above, we must do the reload. */
4964 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4965 base_reg_class (mode, as, MEM, SCRATCH),
4966 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4967 return 1;
4970 if (strict_memory_address_addr_space_p (mode, ad, as))
4972 /* The address appears valid, so reloads are not needed.
4973 But the address may contain an eliminable register.
4974 This can happen because a machine with indirect addressing
4975 may consider a pseudo register by itself a valid address even when
4976 it has failed to get a hard reg.
4977 So do a tree-walk to find and eliminate all such regs. */
4979 /* But first quickly dispose of a common case. */
4980 if (GET_CODE (ad) == PLUS
4981 && CONST_INT_P (XEXP (ad, 1))
4982 && REG_P (XEXP (ad, 0))
4983 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4984 return 0;
4986 subst_reg_equivs_changed = 0;
4987 *loc = subst_reg_equivs (ad, insn);
4989 if (! subst_reg_equivs_changed)
4990 return 0;
4992 /* Check result for validity after substitution. */
4993 if (strict_memory_address_addr_space_p (mode, ad, as))
4994 return 0;
4997 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5000 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5002 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5003 ind_levels, win);
5005 break;
5006 win:
5007 *memrefloc = copy_rtx (*memrefloc);
5008 XEXP (*memrefloc, 0) = ad;
5009 move_replacements (&ad, &XEXP (*memrefloc, 0));
5010 return -1;
5012 while (0);
5013 #endif
5015 /* The address is not valid. We have to figure out why. First see if
5016 we have an outer AND and remove it if so. Then analyze what's inside. */
5018 if (GET_CODE (ad) == AND)
5020 removed_and = 1;
5021 loc = &XEXP (ad, 0);
5022 ad = *loc;
5025 /* One possibility for why the address is invalid is that it is itself
5026 a MEM. This can happen when the frame pointer is being eliminated, a
5027 pseudo is not allocated to a hard register, and the offset between the
5028 frame and stack pointers is not its initial value. In that case the
5029 pseudo will have been replaced by a MEM referring to the
5030 stack pointer. */
5031 if (MEM_P (ad))
5033 /* First ensure that the address in this MEM is valid. Then, unless
5034 indirect addresses are valid, reload the MEM into a register. */
5035 tem = ad;
5036 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5037 opnum, ADDR_TYPE (type),
5038 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5040 /* If tem was changed, then we must create a new memory reference to
5041 hold it and store it back into memrefloc. */
5042 if (tem != ad && memrefloc)
5044 *memrefloc = copy_rtx (*memrefloc);
5045 copy_replacements (tem, XEXP (*memrefloc, 0));
5046 loc = &XEXP (*memrefloc, 0);
5047 if (removed_and)
5048 loc = &XEXP (*loc, 0);
5051 /* Check the same cases as for indirect addresses above, except
5052 that we can allow pseudos and a MEM, since they should have been
5053 taken care of above.
5055 if (ind_levels == 0
5056 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5057 || MEM_P (XEXP (tem, 0))
5058 || ! (REG_P (XEXP (tem, 0))
5059 || (GET_CODE (XEXP (tem, 0)) == PLUS
5060 && REG_P (XEXP (XEXP (tem, 0), 0))
5061 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5063 /* Must use TEM here, not AD, since it is the one that will
5064 have any subexpressions reloaded, if needed. */
5065 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5066 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5067 VOIDmode, 0,
5068 0, opnum, type);
5069 return ! removed_and;
5071 else
5072 return 0;
5075 /* If we have the address of a stack slot but it's not valid because the
5076 displacement is too large, compute the sum in a register.
5077 Handle all base registers here, not just fp/ap/sp, because on some
5078 targets (namely SH) we can also get too large displacements from
5079 big-endian corrections. */
5080 else if (GET_CODE (ad) == PLUS
5081 && REG_P (XEXP (ad, 0))
5082 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5083 && CONST_INT_P (XEXP (ad, 1))
5084 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5085 CONST_INT)
5086 /* Similarly, if we were to reload the base register and the
5087 mem+offset address is still invalid, then we want to reload
5088 the whole address, not just the base register. */
5089 || ! maybe_memory_address_addr_space_p
5090 (mode, ad, as, &(XEXP (ad, 0)))))
5093 /* Unshare the MEM rtx so we can safely alter it. */
5094 if (memrefloc)
5096 *memrefloc = copy_rtx (*memrefloc);
5097 loc = &XEXP (*memrefloc, 0);
5098 if (removed_and)
5099 loc = &XEXP (*loc, 0);
5102 if (double_reg_address_ok
5103 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5104 PLUS, CONST_INT))
5106 /* Unshare the sum as well. */
5107 *loc = ad = copy_rtx (ad);
5109 /* Reload the displacement into an index reg.
5110 We assume the frame pointer or arg pointer is a base reg. */
5111 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5112 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5113 type, ind_levels);
5114 return 0;
5116 else
5118 /* If the sum of two regs is not necessarily valid,
5119 reload the sum into a base reg.
5120 That will at least work. */
5121 find_reloads_address_part (ad, loc,
5122 base_reg_class (mode, as, MEM, SCRATCH),
5123 GET_MODE (ad), opnum, type, ind_levels);
5125 return ! removed_and;
5128 /* If we have an indexed stack slot, there are three possible reasons why
5129 it might be invalid: The index might need to be reloaded, the address
5130 might have been made by frame pointer elimination and hence have a
5131 constant out of range, or both reasons might apply.
5133 We can easily check for an index needing reload, but even if that is the
5134 case, we might also have an invalid constant. To avoid making the
5135 conservative assumption and requiring two reloads, we see if this address
5136 is valid when not interpreted strictly. If it is, the only problem is
5137 that the index needs a reload and find_reloads_address_1 will take care
5138 of it.
5140 Handle all base registers here, not just fp/ap/sp, because on some
5141 targets (namely SPARC) we can also get invalid addresses from preventive
5142 subreg big-endian corrections made by find_reloads_toplev. We
5143 can also get expressions involving LO_SUM (rather than PLUS) from
5144 find_reloads_subreg_address.
5146 If we decide to do something, it must be that `double_reg_address_ok'
5147 is true. We generate a reload of the base register + constant and
5148 rework the sum so that the reload register will be added to the index.
5149 This is safe because we know the address isn't shared.
5151 We check for the base register as both the first and second operand of
5152 the innermost PLUS and/or LO_SUM. */
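/* Roughly, and with made-up values: given
   AD = (plus (plus (reg fp) (reg idx)) (const_int 4096)) with the
   displacement out of range, we reload (plus (reg fp) (const_int 4096))
   into a base register and rework the address so that this reload
   register is added to (reg idx).  */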
5154 for (op_index = 0; op_index < 2; ++op_index)
5156 rtx operand, addend;
5157 enum rtx_code inner_code;
5159 if (GET_CODE (ad) != PLUS)
5160 continue;
5162 inner_code = GET_CODE (XEXP (ad, 0));
5163 if (!(GET_CODE (ad) == PLUS
5164 && CONST_INT_P (XEXP (ad, 1))
5165 && (inner_code == PLUS || inner_code == LO_SUM)))
5166 continue;
5168 operand = XEXP (XEXP (ad, 0), op_index);
5169 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5170 continue;
5172 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5174 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5175 GET_CODE (addend))
5176 || operand == frame_pointer_rtx
5177 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5178 && operand == hard_frame_pointer_rtx)
5179 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5180 && operand == arg_pointer_rtx)
5181 || operand == stack_pointer_rtx)
5182 && ! maybe_memory_address_addr_space_p
5183 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5185 rtx offset_reg;
5186 enum reg_class cls;
5188 offset_reg = plus_constant (GET_MODE (ad), operand,
5189 INTVAL (XEXP (ad, 1)));
5191 /* Form the adjusted address. */
5192 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5193 ad = gen_rtx_PLUS (GET_MODE (ad),
5194 op_index == 0 ? offset_reg : addend,
5195 op_index == 0 ? addend : offset_reg);
5196 else
5197 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5198 op_index == 0 ? offset_reg : addend,
5199 op_index == 0 ? addend : offset_reg);
5200 *loc = ad;
5202 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5203 find_reloads_address_part (XEXP (ad, op_index),
5204 &XEXP (ad, op_index), cls,
5205 GET_MODE (ad), opnum, type, ind_levels);
5206 find_reloads_address_1 (mode, as,
5207 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5208 GET_CODE (XEXP (ad, op_index)),
5209 &XEXP (ad, 1 - op_index), opnum,
5210 type, 0, insn);
5212 return 0;
5216 /* See if address becomes valid when an eliminable register
5217 in a sum is replaced. */
5219 tem = ad;
5220 if (GET_CODE (ad) == PLUS)
5221 tem = subst_indexed_address (ad);
5222 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5224 /* Ok, we win that way. Replace any additional eliminable
5225 registers. */
5227 subst_reg_equivs_changed = 0;
5228 tem = subst_reg_equivs (tem, insn);
5230 /* Make sure that didn't make the address invalid again. */
5232 if (! subst_reg_equivs_changed
5233 || strict_memory_address_addr_space_p (mode, tem, as))
5235 *loc = tem;
5236 return 0;
5240 /* If constants aren't valid addresses, reload the constant address
5241 into a register. */
5242 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5244 machine_mode address_mode = GET_MODE (ad);
5245 if (address_mode == VOIDmode)
5246 address_mode = targetm.addr_space.address_mode (as);
5248 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5249 Unshare it so we can safely alter it. */
5250 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5251 && CONSTANT_POOL_ADDRESS_P (ad))
5253 *memrefloc = copy_rtx (*memrefloc);
5254 loc = &XEXP (*memrefloc, 0);
5255 if (removed_and)
5256 loc = &XEXP (*loc, 0);
5259 find_reloads_address_part (ad, loc,
5260 base_reg_class (mode, as, MEM, SCRATCH),
5261 address_mode, opnum, type, ind_levels);
5262 return ! removed_and;
5265 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5266 opnum, type, ind_levels, insn);
5269 /* Find all pseudo regs appearing in AD
5270 that are eliminable in favor of equivalent values
5271 and do not have hard regs; replace them by their equivalents.
5272 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5273 front of it for pseudos that we have to replace with stack slots. */
5275 static rtx
5276 subst_reg_equivs (rtx ad, rtx_insn *insn)
5278 RTX_CODE code = GET_CODE (ad);
5279 int i;
5280 const char *fmt;
5282 switch (code)
5284 case HIGH:
5285 case CONST:
5286 CASE_CONST_ANY:
5287 case SYMBOL_REF:
5288 case LABEL_REF:
5289 case PC:
5290 case CC0:
5291 return ad;
5293 case REG:
5295 int regno = REGNO (ad);
5297 if (reg_equiv_constant (regno) != 0)
5299 subst_reg_equivs_changed = 1;
5300 return reg_equiv_constant (regno);
5302 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5304 rtx mem = make_memloc (ad, regno);
5305 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5307 subst_reg_equivs_changed = 1;
5308 /* We mark the USE with QImode so that we recognize it
5309 as one that can be safely deleted at the end of
5310 reload. */
5311 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5312 QImode);
5313 return mem;
5317 return ad;
5319 case PLUS:
5320 /* Quickly dispose of a common case. */
5321 if (XEXP (ad, 0) == frame_pointer_rtx
5322 && CONST_INT_P (XEXP (ad, 1)))
5323 return ad;
5324 break;
5326 default:
5327 break;
5330 fmt = GET_RTX_FORMAT (code);
5331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5332 if (fmt[i] == 'e')
5333 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5334 return ad;
5337 /* Compute the sum of X and Y, making canonicalizations assumed in an
5338 address, namely: sum constant integers, surround the sum of two
5339 constants with a CONST, put the constant as the second operand, and
5340 group the constant on the outermost sum.
5342 This routine assumes both inputs are already in canonical form. */
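/* Two sketches of the canonicalization (operands made up):
   form_sum (mode, (plus (reg 65) (const_int 4)), (const_int 8)) yields
   (plus (reg 65) (const_int 12)); adding a CONST_INT to a SYMBOL_REF
   yields the encapsulated form
   (const (plus (symbol_ref "x") (const_int 8))).  */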
5345 form_sum (machine_mode mode, rtx x, rtx y)
5347 rtx tem;
5349 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5350 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5352 if (CONST_INT_P (x))
5353 return plus_constant (mode, y, INTVAL (x));
5354 else if (CONST_INT_P (y))
5355 return plus_constant (mode, x, INTVAL (y));
5356 else if (CONSTANT_P (x))
5357 tem = x, x = y, y = tem;
5359 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5360 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5362 /* Note that if the operands of Y are specified in the opposite
5363 order in the recursive calls below, infinite recursion will occur. */
5364 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5365 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5367 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5368 constant will have been placed second. */
5369 if (CONSTANT_P (x) && CONSTANT_P (y))
5371 if (GET_CODE (x) == CONST)
5372 x = XEXP (x, 0);
5373 if (GET_CODE (y) == CONST)
5374 y = XEXP (y, 0);
5376 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5379 return gen_rtx_PLUS (mode, x, y);
5382 /* If ADDR is a sum containing a pseudo register that should be
5383 replaced with a constant (from reg_equiv_constant),
5384 return the result of doing so, and also apply the associative
5385 law so that the result is more likely to be a valid address.
5386 (But it is not guaranteed to be one.)
5388 Note that at most one register is replaced, even if more are
5389 replaceable. Also, we try to put the result into a canonical form
5390 so it is more likely to be a valid address.
5392 In all other cases, return ADDR. */
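/* Roughly, with made-up register numbers: if (reg 70) is equivalent to
   (symbol_ref "buf") and ADDR is
   (plus (plus (reg 70) (reg 80)) (const_int 4)), the result is
   (plus (reg 80) (const (plus (symbol_ref "buf") (const_int 4)))).  */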
5394 static rtx
5395 subst_indexed_address (rtx addr)
5397 rtx op0 = 0, op1 = 0, op2 = 0;
5398 rtx tem;
5399 int regno;
5401 if (GET_CODE (addr) == PLUS)
5403 /* Try to find a register to replace. */
5404 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5405 if (REG_P (op0)
5406 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5407 && reg_renumber[regno] < 0
5408 && reg_equiv_constant (regno) != 0)
5409 op0 = reg_equiv_constant (regno);
5410 else if (REG_P (op1)
5411 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5412 && reg_renumber[regno] < 0
5413 && reg_equiv_constant (regno) != 0)
5414 op1 = reg_equiv_constant (regno);
5415 else if (GET_CODE (op0) == PLUS
5416 && (tem = subst_indexed_address (op0)) != op0)
5417 op0 = tem;
5418 else if (GET_CODE (op1) == PLUS
5419 && (tem = subst_indexed_address (op1)) != op1)
5420 op1 = tem;
5421 else
5422 return addr;
5424 /* Pick out up to three things to add. */
5425 if (GET_CODE (op1) == PLUS)
5426 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5427 else if (GET_CODE (op0) == PLUS)
5428 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5430 /* Compute the sum. */
5431 if (op2 != 0)
5432 op1 = form_sum (GET_MODE (addr), op1, op2);
5433 if (op1 != 0)
5434 op0 = form_sum (GET_MODE (addr), op0, op1);
5436 return op0;
5438 return addr;
5441 /* Update the REG_INC notes for an insn. It updates all REG_INC
5442 notes for the instruction that refer to REGNO so that they refer
5443 to the register of reload RELOADNUM instead.
5445 INSN is the insn for which any REG_INC notes need updating.
5447 REGNO is the register number which has been reloaded.
5449 RELOADNUM is the reload number. */
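/* For example, if INSN carries a note (expr_list:REG_INC (reg 123) ...)
   and REGNO is 123, the (reg 123) inside that note is queued via
   push_replacement to be replaced by the register of reload RELOADNUM.  */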
5451 static void
5452 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5453 int reloadnum ATTRIBUTE_UNUSED)
5455 if (!AUTO_INC_DEC)
5456 return;
5458 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5459 if (REG_NOTE_KIND (link) == REG_INC
5460 && (int) REGNO (XEXP (link, 0)) == regno)
5461 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5464 /* Record the pseudo registers we must reload into hard registers in a
5465 subexpression of a would-be memory address, X referring to a value
5466 in mode MODE. (This function is not called if the address we find
5467 is strictly valid.)
5469 CONTEXT = 1 means we are considering regs as index regs,
5470 = 0 means we are considering them as base regs.
5471 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5472 or an autoinc code.
5473 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5474 is the code of the index part of the address. Otherwise, pass SCRATCH
5475 for this argument.
5476 OPNUM and TYPE specify the purpose of any reloads made.
5478 IND_LEVELS says how many levels of indirect addressing are
5479 supported at this point in the address.
5481 INSN, if nonzero, is the insn in which we do the reload. It is used
5482 to determine if we may generate output reloads.
5484 We return nonzero if X, as a whole, is reloaded or replaced. */
5486 /* Note that we take shortcuts assuming that no multi-reg machine mode
5487 occurs as part of an address.
5488 Also, this is not fully machine-customizable; it works for machines
5489 such as VAXen and 68000's and 32000's, but other possible machines
5490 could have addressing modes that this does not handle right.
5491 If you add push_reload calls here, you need to make sure gen_reload
5492 handles those cases gracefully. */
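/* As a sketch of how CONTEXT is used below (register numbers are
   illustrative only): in an address such as
   (plus (mult (reg i) (const_int 4)) (reg b)), the MULT operand is
   processed with CONTEXT == 1 (index register) and the other operand
   with CONTEXT == 0 (base register).  */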
5494 static int
5495 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5496 rtx x, int context,
5497 enum rtx_code outer_code, enum rtx_code index_code,
5498 rtx *loc, int opnum, enum reload_type type,
5499 int ind_levels, rtx_insn *insn)
5501 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5502 ((CONTEXT) == 0 \
5503 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5504 : REGNO_OK_FOR_INDEX_P (REGNO))
5506 enum reg_class context_reg_class;
5507 RTX_CODE code = GET_CODE (x);
5508 bool reloaded_inner_of_autoinc = false;
5510 if (context == 1)
5511 context_reg_class = INDEX_REG_CLASS;
5512 else
5513 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5515 switch (code)
5517 case PLUS:
5519 rtx orig_op0 = XEXP (x, 0);
5520 rtx orig_op1 = XEXP (x, 1);
5521 RTX_CODE code0 = GET_CODE (orig_op0);
5522 RTX_CODE code1 = GET_CODE (orig_op1);
5523 rtx op0 = orig_op0;
5524 rtx op1 = orig_op1;
5526 if (GET_CODE (op0) == SUBREG)
5528 op0 = SUBREG_REG (op0);
5529 code0 = GET_CODE (op0);
5530 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5531 op0 = gen_rtx_REG (word_mode,
5532 (REGNO (op0) +
5533 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5534 GET_MODE (SUBREG_REG (orig_op0)),
5535 SUBREG_BYTE (orig_op0),
5536 GET_MODE (orig_op0))));
5539 if (GET_CODE (op1) == SUBREG)
5541 op1 = SUBREG_REG (op1);
5542 code1 = GET_CODE (op1);
5543 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5544 /* ??? Why is this given op1's mode and above for
5545 ??? op0 SUBREGs we use word_mode? */
5546 op1 = gen_rtx_REG (GET_MODE (op1),
5547 (REGNO (op1) +
5548 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5549 GET_MODE (SUBREG_REG (orig_op1)),
5550 SUBREG_BYTE (orig_op1),
5551 GET_MODE (orig_op1))));
5553 /* A PLUS in the index register may be created only as a result of
5554 register rematerialization for an expression like &localvar*4. Reload it.
5555 It may be possible to combine the displacement on the outer level,
5556 but it is probably not worthwhile to do so. */
5557 if (context == 1)
5559 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5560 opnum, ADDR_TYPE (type), ind_levels, insn);
5561 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5562 context_reg_class,
5563 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5564 return 1;
5567 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5568 || code0 == ZERO_EXTEND || code1 == MEM)
5570 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5571 &XEXP (x, 0), opnum, type, ind_levels,
5572 insn);
5573 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5578 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5579 || code1 == ZERO_EXTEND || code0 == MEM)
5581 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5582 &XEXP (x, 0), opnum, type, ind_levels,
5583 insn);
5584 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5585 &XEXP (x, 1), opnum, type, ind_levels,
5586 insn);
5589 else if (code0 == CONST_INT || code0 == CONST
5590 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5591 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5592 &XEXP (x, 1), opnum, type, ind_levels,
5593 insn);
5595 else if (code1 == CONST_INT || code1 == CONST
5596 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5597 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5598 &XEXP (x, 0), opnum, type, ind_levels,
5599 insn);
5601 else if (code0 == REG && code1 == REG)
5603 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5604 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5605 return 0;
5606 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5607 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5608 return 0;
5609 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5610 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5611 &XEXP (x, 1), opnum, type, ind_levels,
5612 insn);
5613 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5614 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5615 &XEXP (x, 0), opnum, type, ind_levels,
5616 insn);
5617 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5618 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5619 &XEXP (x, 0), opnum, type, ind_levels,
5620 insn);
5621 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5622 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5623 &XEXP (x, 1), opnum, type, ind_levels,
5624 insn);
5625 else
5627 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5636 else if (code0 == REG)
5638 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5642 &XEXP (x, 1), opnum, type, ind_levels,
5643 insn);
5646 else if (code1 == REG)
5648 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5649 &XEXP (x, 1), opnum, type, ind_levels,
5650 insn);
5651 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5652 &XEXP (x, 0), opnum, type, ind_levels,
5653 insn);
5657 return 0;
5659 case POST_MODIFY:
5660 case PRE_MODIFY:
5662 rtx op0 = XEXP (x, 0);
5663 rtx op1 = XEXP (x, 1);
5664 enum rtx_code index_code;
5665 int regno;
5666 int reloadnum;
5668 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5669 return 0;
5671 /* Currently, we only support {PRE,POST}_MODIFY constructs
5672 where a base register is {inc,dec}remented by the contents
5673 of another register or by a constant value. Thus, these
5674 operands must match. */
5675 gcc_assert (op0 == XEXP (op1, 0));
5677 /* Require index register (or constant). Let's just handle the
5678 register case in the meantime... If the target allows
5679 auto-modify by a constant then we could try replacing a pseudo
5680 register with its equivalent constant where applicable.
5682 We also handle the case where the register was eliminated
5683 resulting in a PLUS subexpression.
5685 If we later decide to reload the whole PRE_MODIFY or
5686 POST_MODIFY, inc_for_reload might clobber the reload register
5687 before reading the index. The index register might therefore
5688 need to live longer than a TYPE reload normally would, so be
5689 conservative and class it as RELOAD_OTHER. */
5690 if ((REG_P (XEXP (op1, 1))
5691 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5692 || GET_CODE (XEXP (op1, 1)) == PLUS)
5693 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5694 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5695 ind_levels, insn);
5697 gcc_assert (REG_P (XEXP (op1, 0)));
5699 regno = REGNO (XEXP (op1, 0));
5700 index_code = GET_CODE (XEXP (op1, 1));
5702 /* A register that is incremented cannot be constant! */
5703 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5704 || reg_equiv_constant (regno) == 0);
5706 /* Handle a register that is equivalent to a memory location
5707 which cannot be addressed directly. */
5708 if (reg_equiv_memory_loc (regno) != 0
5709 && (reg_equiv_address (regno) != 0
5710 || num_not_at_initial_offset))
5712 rtx tem = make_memloc (XEXP (x, 0), regno);
5714 if (reg_equiv_address (regno)
5715 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5717 rtx orig = tem;
5719 /* First reload the memory location's address.
5720 We can't use ADDR_TYPE (type) here, because we need to
5721 write back the value after reading it, hence we actually
5722 need two registers. */
5723 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5724 &XEXP (tem, 0), opnum,
5725 RELOAD_OTHER,
5726 ind_levels, insn);
5728 if (!rtx_equal_p (tem, orig))
5729 push_reg_equiv_alt_mem (regno, tem);
5731 /* Then reload the memory location into a base
5732 register. */
5733 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5734 &XEXP (op1, 0),
5735 base_reg_class (mode, as,
5736 code, index_code),
5737 GET_MODE (x), GET_MODE (x), 0,
5738 0, opnum, RELOAD_OTHER);
5740 update_auto_inc_notes (this_insn, regno, reloadnum);
5741 return 0;
5745 if (reg_renumber[regno] >= 0)
5746 regno = reg_renumber[regno];
5748 /* We require a base register here... */
5749 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5751 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5752 &XEXP (op1, 0), &XEXP (x, 0),
5753 base_reg_class (mode, as,
5754 code, index_code),
5755 GET_MODE (x), GET_MODE (x), 0, 0,
5756 opnum, RELOAD_OTHER);
5758 update_auto_inc_notes (this_insn, regno, reloadnum);
5759 return 0;
5762 return 0;
5764 case POST_INC:
5765 case POST_DEC:
5766 case PRE_INC:
5767 case PRE_DEC:
5768 if (REG_P (XEXP (x, 0)))
5770 int regno = REGNO (XEXP (x, 0));
5771 int value = 0;
5772 rtx x_orig = x;
5774 /* A register that is incremented cannot be constant! */
5775 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5776 || reg_equiv_constant (regno) == 0);
5778 /* Handle a register that is equivalent to a memory location
5779 which cannot be addressed directly. */
5780 if (reg_equiv_memory_loc (regno) != 0
5781 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5783 rtx tem = make_memloc (XEXP (x, 0), regno);
5784 if (reg_equiv_address (regno)
5785 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5787 rtx orig = tem;
5789 /* First reload the memory location's address.
5790 We can't use ADDR_TYPE (type) here, because we need to
5791 write back the value after reading it, hence we actually
5792 need two registers. */
5793 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5794 &XEXP (tem, 0), opnum, type,
5795 ind_levels, insn);
5796 reloaded_inner_of_autoinc = true;
5797 if (!rtx_equal_p (tem, orig))
5798 push_reg_equiv_alt_mem (regno, tem);
5799 /* Put this inside a new increment-expression. */
5800 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5801 /* Proceed to reload that, as if it contained a register. */
5805 /* If we have a hard register that is ok in this incdec context,
5806 don't make a reload. If the register isn't nice enough for
5807 autoincdec, we can reload it. But if an autoincrement of a
5808 register that we have verified here as acceptable still isn't
5809 "valid" in the surrounding context, it must be that no autoincrement is "valid".
5810 If that is true and something made an autoincrement anyway,
5811 this must be a special context where one is allowed.
5812 (For example, a "push" instruction.)
5813 We can't improve this address, so leave it alone. */
5815 /* Otherwise, reload the autoincrement into a suitable hard reg
5816 and record how much to increment by. */
5818 if (reg_renumber[regno] >= 0)
5819 regno = reg_renumber[regno];
5820 if (regno >= FIRST_PSEUDO_REGISTER
5821 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5822 index_code))
5824 int reloadnum;
5826 /* If we can output the register afterwards, do so; this
5827 saves the extra update.
5828 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5829 CALL_INSN - and it does not set CC0.
5830 But don't do this if we cannot directly address the
5831 memory location, since this will make it harder to
5832 reuse address reloads, and increases register pressure.
5833 Also don't do this if we can probably update x directly. */
5834 rtx equiv = (MEM_P (XEXP (x, 0))
5835 ? XEXP (x, 0)
5836 : reg_equiv_mem (regno));
5837 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5838 if (insn && NONJUMP_INSN_P (insn)
5839 #if HAVE_cc0
5840 && ! sets_cc0_p (PATTERN (insn))
5841 #endif
5842 && (regno < FIRST_PSEUDO_REGISTER
5843 || (equiv
5844 && memory_operand (equiv, GET_MODE (equiv))
5845 && ! (icode != CODE_FOR_nothing
5846 && insn_operand_matches (icode, 0, equiv)
5847 && insn_operand_matches (icode, 1, equiv))))
5848 /* Using RELOAD_OTHER means we emit this and the reload we
5849 made earlier in the wrong order. */
5850 && !reloaded_inner_of_autoinc)
5852 /* We use the original pseudo for loc, so that
5853 emit_reload_insns() knows which pseudo this
5854 reload refers to and updates the pseudo rtx, not
5855 its equivalent memory location, as well as the
5856 corresponding entry in reg_last_reload_reg. */
5857 loc = &XEXP (x_orig, 0);
5858 x = XEXP (x, 0);
5859 reloadnum
5860 = push_reload (x, x, loc, loc,
5861 context_reg_class,
5862 GET_MODE (x), GET_MODE (x), 0, 0,
5863 opnum, RELOAD_OTHER);
5865 else
5867 reloadnum
5868 = push_reload (x, x, loc, (rtx*) 0,
5869 context_reg_class,
5870 GET_MODE (x), GET_MODE (x), 0, 0,
5871 opnum, type);
5872 rld[reloadnum].inc
5873 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5875 value = 1;
5878 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5879 reloadnum);
5881 return value;
5883 return 0;
5885 case TRUNCATE:
5886 case SIGN_EXTEND:
5887 case ZERO_EXTEND:
5888 /* Look for parts to reload in the inner expression and reload them
5889 too, in addition to this operation. Reloading all inner parts in
5890 addition to this one shouldn't be necessary, but at this point,
5891 we don't know if we can possibly omit any part that *can* be
5892 reloaded. Targets that are better off reloading just either part
5893 (or perhaps even a different part of an outer expression), should
5894 define LEGITIMIZE_RELOAD_ADDRESS. */
5895 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5896 context, code, SCRATCH, &XEXP (x, 0), opnum,
5897 type, ind_levels, insn);
5898 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5899 context_reg_class,
5900 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5901 return 1;
5903 case MEM:
5904 /* This is probably the result of a substitution, by eliminate_regs, of
5905 an equivalent address for a pseudo that was not allocated to a hard
5906 register. Verify that the specified address is valid and reload it
5907 into a register.
5909 Since we know we are going to reload this item, don't decrement for
5910 the indirection level.
5912 Note that this is actually conservative: it would be slightly more
5913 efficient to use the value of SPILL_INDIRECT_LEVELS from
5914 reload1.c here. */
5916 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5917 opnum, ADDR_TYPE (type), ind_levels, insn);
5918 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5919 context_reg_class,
5920 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5921 return 1;
5923 case REG:
5925 int regno = REGNO (x);
5927 if (reg_equiv_constant (regno) != 0)
5929 find_reloads_address_part (reg_equiv_constant (regno), loc,
5930 context_reg_class,
5931 GET_MODE (x), opnum, type, ind_levels);
5932 return 1;
5935 #if 0 /* This might confuse the code in reload1.c that deletes a prior
5936 output-reload feeding this insn. */
5937 if (reg_equiv_mem (regno) != 0)
5939 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5940 context_reg_class,
5941 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5942 return 1;
5944 #endif
5946 if (reg_equiv_memory_loc (regno)
5947 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5949 rtx tem = make_memloc (x, regno);
5950 if (reg_equiv_address (regno) != 0
5951 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5953 x = tem;
5954 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5955 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5956 ind_levels, insn);
5957 if (!rtx_equal_p (x, tem))
5958 push_reg_equiv_alt_mem (regno, x);
5962 if (reg_renumber[regno] >= 0)
5963 regno = reg_renumber[regno];
5965 if (regno >= FIRST_PSEUDO_REGISTER
5966 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5967 index_code))
5969 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5970 context_reg_class,
5971 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5972 return 1;
5975 /* If a register appearing in an address is the subject of a CLOBBER
5976 in this insn, reload it into some other register to be safe.
5977 The CLOBBER is supposed to make the register unavailable
5978 from before this insn to after it. */
5979 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5981 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5982 context_reg_class,
5983 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5984 return 1;
5987 return 0;
5989 case SUBREG:
5990 if (REG_P (SUBREG_REG (x)))
5992 /* If this is a SUBREG of a hard register and the resulting register
5993 is of the wrong class, reload the whole SUBREG. This avoids
5994 needless copies if SUBREG_REG is multi-word. */
5995 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5997 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5999 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6000 index_code))
6002 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6003 context_reg_class,
6004 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6005 return 1;
6008 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6009 is larger than the class size, then reload the whole SUBREG. */
6010 else
6012 enum reg_class rclass = context_reg_class;
6013 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6014 > reg_class_size[(int) rclass])
6016 /* If the inner register will be replaced by a memory
6017 reference, we can do this only if we can replace the
6018 whole subreg by a (narrower) memory reference. If
6019 this is not possible, fall through and reload just
6020 the inner register (including address reloads). */
6021 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6023 rtx tem = find_reloads_subreg_address (x, opnum,
6024 ADDR_TYPE (type),
6025 ind_levels, insn,
6026 NULL);
6027 if (tem)
6029 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6030 GET_MODE (tem), VOIDmode, 0, 0,
6031 opnum, type);
6032 return 1;
6035 else
6037 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6038 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6039 return 1;
6044 break;
6046 default:
6047 break;
6051 const char *fmt = GET_RTX_FORMAT (code);
6052 int i;
6054 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6056 if (fmt[i] == 'e')
6057 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6058 we get here. */
6059 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6060 code, SCRATCH, &XEXP (x, i),
6061 opnum, type, ind_levels, insn);
6065 #undef REG_OK_FOR_CONTEXT
6066 return 0;
6069 /* X, which is found at *LOC, is a part of an address that needs to be
6070 reloaded into a register of class RCLASS. If X is a constant, or if
6071 X is a PLUS that contains a constant, check that the constant is a
6072 legitimate operand and that we are supposed to be able to load
6073 it into the register.
6075 If not, force the constant into memory and reload the MEM instead.
6077 MODE is the mode to use, in case X is an integer constant.
6079 OPNUM and TYPE describe the purpose of any reloads made.
6081 IND_LEVELS says how many levels of indirect addressing this machine
6082 supports. */
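/* For instance (a hypothetical case): if X is a CONST_DOUBLE that the
   target cannot load directly into RCLASS, it is forced into the
   constant pool below and the resulting (mem (symbol_ref ...)) is what
   gets reloaded instead.  */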
6084 static void
6085 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6086 machine_mode mode, int opnum,
6087 enum reload_type type, int ind_levels)
6089 if (CONSTANT_P (x)
6090 && (!targetm.legitimate_constant_p (mode, x)
6091 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6093 x = force_const_mem (mode, x);
6094 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6095 opnum, type, ind_levels, 0);
6098 else if (GET_CODE (x) == PLUS
6099 && CONSTANT_P (XEXP (x, 1))
6100 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6101 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6102 == NO_REGS))
6104 rtx tem;
6106 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6107 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6108 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6109 opnum, type, ind_levels, 0);
6112 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6113 mode, VOIDmode, 0, 0, opnum, type);
6116 /* X, a subreg of a pseudo, is a part of an address that needs to be
6117 reloaded, and the pseudo is equivalent to a memory location.
6119 Attempt to replace the whole subreg by a (possibly narrower or wider)
6120 memory reference. If this is possible, return this new memory
6121 reference, and push all required address reloads. Otherwise,
6122 return NULL.
6124 OPNUM and TYPE identify the purpose of the reload.
6126 IND_LEVELS says how many levels of indirect addressing are
6127 supported at this point in the address.
6129 INSN, if nonzero, is the insn in which we do the reload. It is used
6130 to determine where to put USEs for pseudos that we have to replace with
6131 stack slots. */
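/* For illustration (offsets are made up): if (reg:SI 123) is equivalent
   to (mem:SI (plus (reg fp) (const_int -16))), then
   (subreg:HI (reg:SI 123) 0) can be replaced by
   (mem:HI (plus (reg fp) (const_int -16))), with any address reloads the
   narrower reference needs pushed as well.  */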
6133 static rtx
6134 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6135 int ind_levels, rtx_insn *insn,
6136 int *address_reloaded)
6138 machine_mode outer_mode = GET_MODE (x);
6139 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6140 int regno = REGNO (SUBREG_REG (x));
6141 int reloaded = 0;
6142 rtx tem, orig;
6143 int offset;
6145 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6147 /* We cannot replace the subreg with a modified memory reference if:
6149 - we have a paradoxical subreg that implicitly acts as a zero or
6150 sign extension operation due to LOAD_EXTEND_OP;
6152 - we have a subreg that is implicitly supposed to act on the full
6153 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6155 - the address of the equivalent memory location is mode-dependent; or
6157 - we have a paradoxical subreg and the resulting memory is not
6158 sufficiently aligned to allow access in the wider mode.
6160 In addition, we choose not to perform the replacement for *any*
6161 paradoxical subreg, even if it were possible in principle. This
6162 is to avoid generating wider memory references than necessary.
6164 This corresponds to how previous versions of reload used to handle
6165 paradoxical subregs where no address reload was required. */
6167 if (paradoxical_subreg_p (x))
6168 return NULL;
6170 if (WORD_REGISTER_OPERATIONS
6171 && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6172 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6173 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6174 return NULL;
6176 /* Since we don't attempt to handle paradoxical subregs, we can just
6177 call into simplify_subreg, which will handle all remaining checks
6178 for us. */
6179 orig = make_memloc (SUBREG_REG (x), regno);
6180 offset = SUBREG_BYTE (x);
6181 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6182 if (!tem || !MEM_P (tem))
6183 return NULL;
6185 /* Now push all required address reloads, if any. */
6186 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6187 XEXP (tem, 0), &XEXP (tem, 0),
6188 opnum, type, ind_levels, insn);
6189 /* ??? Do we need to handle nonzero offsets somehow? */
6190 if (!offset && !rtx_equal_p (tem, orig))
6191 push_reg_equiv_alt_mem (regno, tem);
6193 /* For some processors an address may be valid in the original mode but
6194 not in a smaller mode. For example, ARM accepts a scaled index register
6195 in SImode but not in HImode. Note that this is only a problem if the
6196 address in reg_equiv_mem is already invalid in the new mode; other
6197 cases would be fixed by find_reloads_address as usual.
6199 ??? We attempt to handle such cases here by doing an additional reload
6200 of the full address after the usual processing by find_reloads_address.
6201 Note that this may not work in the general case, but it seems to cover
6202 the cases where this situation currently occurs. A more general fix
6203 might be to reload the *value* instead of the address, but this would
6204 not be expected by the callers of this routine as-is.
6206 If find_reloads_address has already completely replaced the address, there
6207 is nothing further to do. */
6208 if (reloaded == 0
6209 && reg_equiv_mem (regno) != 0
6210 && !strict_memory_address_addr_space_p
6211 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6212 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6214 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6215 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6216 MEM, SCRATCH),
6217 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6218 reloaded = 1;
6221 /* If this is not a toplevel operand, find_reloads doesn't see this
6222 substitution. We have to emit a USE of the pseudo so that
6223 delete_output_reload can see it. */
6224 if (replace_reloads && recog_data.operand[opnum] != x)
6225 /* We mark the USE with QImode so that we recognize it as one that
6226 can be safely deleted at the end of reload. */
6227 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6228 QImode);
6230 if (address_reloaded)
6231 *address_reloaded = reloaded;
6233 return tem;
6236 /* Substitute into the current INSN the registers into which we have reloaded
6237 the things that need reloading. The array `replacements'
6238 contains the locations of all pointers that must be changed
6239 and says what to replace them with.
6241 The substitutions are made in place, directly within INSN. */
6243 void
6244 subst_reloads (rtx_insn *insn)
6246 int i;
6248 for (i = 0; i < n_replacements; i++)
6250 struct replacement *r = &replacements[i];
6251 rtx reloadreg = rld[r->what].reg_rtx;
6252 if (reloadreg)
6254 #ifdef DEBUG_RELOAD
6255 /* This checking takes a very long time on some platforms
6256 causing the gcc.c-torture/compile/limits-fnargs.c test
6257 to time out during testing. See PR 31850.
6259 Internal consistency test. Check that we don't modify
6260 anything in the equivalence arrays. Whenever something from
6261 those arrays needs to be reloaded, it must be unshared before
6262 being substituted into; the equivalence must not be modified.
6263 Otherwise, if the equivalence is used after that, it will
6264 have been modified, and the thing substituted (probably a
6265 register) is likely overwritten and not a usable equivalence. */
6266 int check_regno;
6268 for (check_regno = 0; check_regno < max_regno; check_regno++)
6270 #define CHECK_MODF(ARRAY) \
6271 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6272 || !loc_mentioned_in_p (r->where, \
6273 (*reg_equivs)[check_regno].ARRAY))
6275 CHECK_MODF (constant);
6276 CHECK_MODF (memory_loc);
6277 CHECK_MODF (address);
6278 CHECK_MODF (mem);
6279 #undef CHECK_MODF
6281 #endif /* DEBUG_RELOAD */
6283 /* If we're replacing a LABEL_REF with a register, there must
6284 already be an indication (to e.g. flow) of which label this
6285 register refers to. */
6286 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6287 || !JUMP_P (insn)
6288 || find_reg_note (insn,
6289 REG_LABEL_OPERAND,
6290 XEXP (*r->where, 0))
6291 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6293 /* Encapsulate RELOADREG so its machine mode matches what
6294 used to be there. Note that gen_lowpart_common will
6295 do the wrong thing if RELOADREG is multi-word. RELOADREG
6296 will always be a REG here. */
6297 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6298 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6300 *r->where = reloadreg;
6302 /* If reload got no reg and isn't optional, something's wrong. */
6303 else
6304 gcc_assert (rld[r->what].optional);
6308 /* Make a copy of any replacements being done into X and move those
6309 copies to locations in Y, a copy of X. */
6311 void
6312 copy_replacements (rtx x, rtx y)
6314 copy_replacements_1 (&x, &y, n_replacements);
6317 static void
6318 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6320 int i, j;
6321 rtx x, y;
6322 struct replacement *r;
6323 enum rtx_code code;
6324 const char *fmt;
6326 for (j = 0; j < orig_replacements; j++)
6327 if (replacements[j].where == px)
6329 r = &replacements[n_replacements++];
6330 r->where = py;
6331 r->what = replacements[j].what;
6332 r->mode = replacements[j].mode;
6335 x = *px;
6336 y = *py;
6337 code = GET_CODE (x);
6338 fmt = GET_RTX_FORMAT (code);
6340 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6342 if (fmt[i] == 'e')
6343 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6344 else if (fmt[i] == 'E')
6345 for (j = XVECLEN (x, i); --j >= 0; )
6346 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6347 orig_replacements);
6351 /* Change any replacements being done to *X to be done to *Y. */
6353 void
6354 move_replacements (rtx *x, rtx *y)
6356 int i;
6358 for (i = 0; i < n_replacements; i++)
6359 if (replacements[i].where == x)
6360 replacements[i].where = y;
6363 /* If LOC was scheduled to be replaced by something, return the replacement.
6364 Otherwise, return *LOC. */
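/* For example (register numbers made up): if *LOC is
   (plus (reg 66) (const_int 4)) and the (reg 66) slot is scheduled to be
   replaced by a reload register, the PLUS case below builds and returns
   a fresh (plus (reg <reload reg>) (const_int 4)).  */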
6367 find_replacement (rtx *loc)
6369 struct replacement *r;
6371 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6373 rtx reloadreg = rld[r->what].reg_rtx;
6375 if (reloadreg && r->where == loc)
6377 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6378 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6380 return reloadreg;
6382 else if (reloadreg && GET_CODE (*loc) == SUBREG
6383 && r->where == &SUBREG_REG (*loc))
6385 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6386 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6388 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6389 GET_MODE (SUBREG_REG (*loc)),
6390 SUBREG_BYTE (*loc));
6394 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6395 what's inside and build a new rtx if so. */
6396 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6397 || GET_CODE (*loc) == MULT)
6399 rtx x = find_replacement (&XEXP (*loc, 0));
6400 rtx y = find_replacement (&XEXP (*loc, 1));
6402 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6403 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6406 return *loc;
6409 /* Return nonzero if a register in the range [REGNO, ENDREGNO)
6410 appears either explicitly or implicitly in X,
6411 other than being stored into (except for earlyclobber operands).
6413 References contained within the substructure at LOC do not count.
6414 LOC may be zero, meaning don't ignore anything.
6416 This is similar to refers_to_regno_p in rtlanal.c except that we
6417 look at equivalences for pseudos that didn't get hard registers. */
6419 static int
6420 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6421 rtx x, rtx *loc)
6423 int i;
6424 unsigned int r;
6425 RTX_CODE code;
6426 const char *fmt;
6428 if (x == 0)
6429 return 0;
6431 repeat:
6432 code = GET_CODE (x);
6434 switch (code)
6436 case REG:
6437 r = REGNO (x);
6439 /* If this is a pseudo, a hard register must not have been allocated.
6440 X must therefore either be a constant or be in memory. */
6441 if (r >= FIRST_PSEUDO_REGISTER)
6443 if (reg_equiv_memory_loc (r))
6444 return refers_to_regno_for_reload_p (regno, endregno,
6445 reg_equiv_memory_loc (r),
6446 (rtx*) 0);
6448 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6449 return 0;
6452 return (endregno > r
6453 && regno < r + (r < FIRST_PSEUDO_REGISTER
6454 ? hard_regno_nregs[r][GET_MODE (x)]
6455 : 1));
6457 case SUBREG:
6458 /* If this is a SUBREG of a hard reg, we can see exactly which
6459 registers are being modified. Otherwise, handle normally. */
6460 if (REG_P (SUBREG_REG (x))
6461 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6463 unsigned int inner_regno = subreg_regno (x);
6464 unsigned int inner_endregno
6465 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6466 ? subreg_nregs (x) : 1);
6468 return endregno > inner_regno && regno < inner_endregno;
6470 break;
6472 case CLOBBER:
6473 case SET:
6474 if (&SET_DEST (x) != loc
6475 /* Note setting a SUBREG counts as referring to the REG it is in for
6476 a pseudo but not for hard registers since we can
6477 treat each word individually. */
6478 && ((GET_CODE (SET_DEST (x)) == SUBREG
6479 && loc != &SUBREG_REG (SET_DEST (x))
6480 && REG_P (SUBREG_REG (SET_DEST (x)))
6481 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6482 && refers_to_regno_for_reload_p (regno, endregno,
6483 SUBREG_REG (SET_DEST (x)),
6484 loc))
6485 /* If the output is an earlyclobber operand, this is
6486 a conflict. */
6487 || ((!REG_P (SET_DEST (x))
6488 || earlyclobber_operand_p (SET_DEST (x)))
6489 && refers_to_regno_for_reload_p (regno, endregno,
6490 SET_DEST (x), loc))))
6491 return 1;
6493 if (code == CLOBBER || loc == &SET_SRC (x))
6494 return 0;
6495 x = SET_SRC (x);
6496 goto repeat;
6498 default:
6499 break;
6502 /* X does not match, so try its subexpressions. */
6504 fmt = GET_RTX_FORMAT (code);
6505 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6507 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6509 if (i == 0)
6511 x = XEXP (x, 0);
6512 goto repeat;
6514 else
6515 if (refers_to_regno_for_reload_p (regno, endregno,
6516 XEXP (x, i), loc))
6517 return 1;
6519 else if (fmt[i] == 'E')
6521 int j;
6522 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6523 if (loc != &XVECEXP (x, i, j)
6524 && refers_to_regno_for_reload_p (regno, endregno,
6525 XVECEXP (x, i, j), loc))
6526 return 1;
6529 return 0;
6532 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6533 we check if any register number in X conflicts with the relevant register
6534 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6535 contains a MEM (we don't bother checking for memory addresses that can't
6536 conflict because we expect this to be a rare case).
6538 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6539 that we look at equivalences for pseudos that didn't get hard registers. */
6542 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6544 int regno, endregno;
6546 /* Overly conservative. */
6547 if (GET_CODE (x) == STRICT_LOW_PART
6548 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6549 x = XEXP (x, 0);
6551 /* If either argument is a constant, then modifying X can not affect IN. */
6552 if (CONSTANT_P (x) || CONSTANT_P (in))
6553 return 0;
6554 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6555 return refers_to_mem_for_reload_p (in);
6556 else if (GET_CODE (x) == SUBREG)
6558 regno = REGNO (SUBREG_REG (x));
6559 if (regno < FIRST_PSEUDO_REGISTER)
6560 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6561 GET_MODE (SUBREG_REG (x)),
6562 SUBREG_BYTE (x),
6563 GET_MODE (x));
6564 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6565 ? subreg_nregs (x) : 1);
6567 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6569 else if (REG_P (x))
6571 regno = REGNO (x);
6573 /* If this is a pseudo, it must not have been assigned a hard register.
6574 Therefore, it must either be in memory or be a constant. */
6576 if (regno >= FIRST_PSEUDO_REGISTER)
6578 if (reg_equiv_memory_loc (regno))
6579 return refers_to_mem_for_reload_p (in);
6580 gcc_assert (reg_equiv_constant (regno));
6581 return 0;
6584 endregno = END_REGNO (x);
6586 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6588 else if (MEM_P (x))
6589 return refers_to_mem_for_reload_p (in);
6590 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6591 || GET_CODE (x) == CC0)
6592 return reg_mentioned_p (x, in);
6593 else
6595 gcc_assert (GET_CODE (x) == PLUS);
6597 /* We actually want to know if X is mentioned somewhere inside IN.
6598 We must not say that (plus (sp) (const_int 124)) is in
6599 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6600 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6601 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6602 while (MEM_P (in))
6603 in = XEXP (in, 0);
6604 if (REG_P (in))
6605 return 0;
6606 else if (GET_CODE (in) == PLUS)
6607 return (rtx_equal_p (x, in)
6608 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6609 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6610 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6611 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6614 gcc_unreachable ();
6617 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6618 registers. */
6620 static int
6621 refers_to_mem_for_reload_p (rtx x)
6623 const char *fmt;
6624 int i;
6626 if (MEM_P (x))
6627 return 1;
6629 if (REG_P (x))
6630 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6631 && reg_equiv_memory_loc (REGNO (x)));
6633 fmt = GET_RTX_FORMAT (GET_CODE (x));
6634 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6635 if (fmt[i] == 'e'
6636 && (MEM_P (XEXP (x, i))
6637 || refers_to_mem_for_reload_p (XEXP (x, i))))
6638 return 1;
6640 return 0;
6643 /* Check the insns before INSN to see if there is a suitable register
6644 containing the same value as GOAL.
6645 If OTHER is -1, look for a register in class RCLASS.
6646 Otherwise, just see if register number OTHER shares GOAL's value.
6648 Return an rtx for the register found, or zero if none is found.
6650 If RELOAD_REG_P is (short *)1,
6651 we reject any hard reg that appears in reload_reg_rtx
6652 because such a hard reg is also needed coming into this insn.
6654 If RELOAD_REG_P is any other nonzero value,
6655 it is a vector indexed by hard reg number
6656 and we reject any hard reg whose element in the vector is nonnegative
6657 as well as any that appears in reload_reg_rtx.
6659 If GOAL is zero, then GOALREG is a register number; we look
6660 for an equivalent for that register.
6662 MODE is the machine mode of the value we want an equivalence for.
6663 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6665 This function is used by jump.c as well as in the reload pass.
6667 If GOAL is the sum of the stack pointer and a constant, we treat it
6668 as if it were a constant except that sp is required to be unchanging. */
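/* A sketch of the reuse this enables (registers and offsets made up):
   if an earlier insn was
   (set (reg 3) (mem:SI (plus (reg fp) (const_int -8)))) and GOAL is that
   same MEM, then (reg 3) can be returned, provided the scan finds nothing
   in between that clobbers (reg 3) or stores into the memory location.  */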
6670 rtx
6671 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6672 short *reload_reg_p, int goalreg, machine_mode mode)
6673 {
6674 rtx_insn *p = insn;
6675 rtx goaltry, valtry, value;
6676 rtx_insn *where;
6677 rtx pat;
6678 int regno = -1;
6679 int valueno;
6680 int goal_mem = 0;
6681 int goal_const = 0;
6682 int goal_mem_addr_varies = 0;
6683 int need_stable_sp = 0;
6684 int nregs;
6685 int valuenregs;
6686 int num = 0;
6688 if (goal == 0)
6689 regno = goalreg;
6690 else if (REG_P (goal))
6691 regno = REGNO (goal);
6692 else if (MEM_P (goal))
6693 {
6694 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6695 if (MEM_VOLATILE_P (goal))
6696 return 0;
6697 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6698 return 0;
6699 /* An address with side effects must be reexecuted. */
6700 switch (code)
6701 {
6702 case POST_INC:
6703 case PRE_INC:
6704 case POST_DEC:
6705 case PRE_DEC:
6706 case POST_MODIFY:
6707 case PRE_MODIFY:
6708 return 0;
6709 default:
6710 break;
6711 }
6712 goal_mem = 1;
6713 }
6714 else if (CONSTANT_P (goal))
6715 goal_const = 1;
6716 else if (GET_CODE (goal) == PLUS
6717 && XEXP (goal, 0) == stack_pointer_rtx
6718 && CONSTANT_P (XEXP (goal, 1)))
6719 goal_const = need_stable_sp = 1;
6720 else if (GET_CODE (goal) == PLUS
6721 && XEXP (goal, 0) == frame_pointer_rtx
6722 && CONSTANT_P (XEXP (goal, 1)))
6723 goal_const = 1;
6724 else
6725 return 0;
6727 num = 0;
6728 /* Scan insns back from INSN, looking for one that copies
6729 a value into or out of GOAL.
6730 Stop and give up if we reach a label. */
6732 while (1)
6733 {
6734 p = PREV_INSN (p);
6735 if (p && DEBUG_INSN_P (p))
6736 continue;
6737 num++;
6738 if (p == 0 || LABEL_P (p)
6739 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6740 return 0;
6742 /* Don't reuse register contents from before a setjmp-type
6743 function call; on the second return (from the longjmp) it
6744 might have been clobbered by a later reuse. It doesn't
6745 seem worthwhile to actually go and see if it is actually
6746 reused even if that information would be readily available;
6747 just don't reuse it across the setjmp call. */
6748 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6749 return 0;
6751 if (NONJUMP_INSN_P (p)
6752 /* If we don't want spill regs ... */
6753 && (! (reload_reg_p != 0
6754 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6755 /* ... then ignore insns introduced by reload; they aren't
6756 useful and can cause results in reload_as_needed to be
6757 different from what they were when calculating the need for
6758 spills. If we notice an input-reload insn here, we will
6759 reject it below, but it might hide a usable equivalent.
6760 That makes bad code. It may even fail: perhaps no reg was
6761 spilled for this insn because it was assumed we would find
6762 that equivalent. */
6763 || INSN_UID (p) < reload_first_uid))
6764 {
6765 rtx tem;
6766 pat = single_set (p);
6768 /* First check for something that sets some reg equal to GOAL. */
6769 if (pat != 0
6770 && ((regno >= 0
6771 && true_regnum (SET_SRC (pat)) == regno
6772 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6773 ||
6774 (regno >= 0
6775 && true_regnum (SET_DEST (pat)) == regno
6776 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6777 ||
6778 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6779 /* When looking for stack pointer + const,
6780 make sure we don't use a stack adjust. */
6781 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6782 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6783 || (goal_mem
6784 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6785 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6786 || (goal_mem
6787 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6788 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6789 /* If we are looking for a constant,
6790 and something equivalent to that constant was copied
6791 into a reg, we can use that reg. */
6792 || (goal_const && REG_NOTES (p) != 0
6793 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6794 && ((rtx_equal_p (XEXP (tem, 0), goal)
6795 && (valueno
6796 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6797 || (REG_P (SET_DEST (pat))
6798 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6799 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6800 && CONST_INT_P (goal)
6801 && 0 != (goaltry
6802 = operand_subword (XEXP (tem, 0), 0, 0,
6803 VOIDmode))
6804 && rtx_equal_p (goal, goaltry)
6805 && (valtry
6806 = operand_subword (SET_DEST (pat), 0, 0,
6807 VOIDmode))
6808 && (valueno = true_regnum (valtry)) >= 0)))
6809 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6810 NULL_RTX))
6811 && REG_P (SET_DEST (pat))
6812 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6813 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6814 && CONST_INT_P (goal)
6815 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6816 VOIDmode))
6817 && rtx_equal_p (goal, goaltry)
6818 && (valtry
6819 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6820 && (valueno = true_regnum (valtry)) >= 0)))
6821 {
6822 if (other >= 0)
6823 {
6824 if (valueno != other)
6825 continue;
6826 }
6827 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6828 continue;
6829 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6830 mode, valueno))
6831 continue;
6832 value = valtry;
6833 where = p;
6834 break;
6835 }
6836 }
6837 }
6839 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6840 (or copying VALUE into GOAL, if GOAL is also a register).
6841 Now verify that VALUE is really valid. */
6843 /* VALUENO is the register number of VALUE; a hard register. */
6845 /* Don't try to re-use something that is killed in this insn. We want
6846 to be able to trust REG_UNUSED notes. */
6847 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6848 return 0;
6850 /* If we propose to get the value from the stack pointer or if GOAL is
6851 a MEM based on the stack pointer, we need a stable SP. */
6852 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6853 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6854 goal)))
6855 need_stable_sp = 1;
6857 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6858 if (GET_MODE (value) != mode)
6859 return 0;
6861 /* Reject VALUE if it was loaded from GOAL
6862 and is also a register that appears in the address of GOAL. */
6864 if (goal_mem && value == SET_DEST (single_set (where))
6865 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6866 goal, (rtx*) 0))
6867 return 0;
6869 /* Reject registers that overlap GOAL. */
6871 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6872 nregs = hard_regno_nregs[regno][mode];
6873 else
6874 nregs = 1;
6875 valuenregs = hard_regno_nregs[valueno][mode];
6877 if (!goal_mem && !goal_const
6878 && regno + nregs > valueno && regno < valueno + valuenregs)
6879 return 0;
6881 /* Reject VALUE if it is one of the regs reserved for reloads.
6882 Reload1 knows how to reuse them anyway, and it would get
6883 confused if we allocated one without its knowledge.
6884 (Now that insns introduced by reload are ignored above,
6885 this case shouldn't happen, but I'm not positive.) */
6887 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6888 {
6889 int i;
6890 for (i = 0; i < valuenregs; ++i)
6891 if (reload_reg_p[valueno + i] >= 0)
6892 return 0;
6893 }
6895 /* Reject VALUE if it is a register being used for an input reload
6896 even if it is not one of those reserved. */
6898 if (reload_reg_p != 0)
6899 {
6900 int i;
6901 for (i = 0; i < n_reloads; i++)
6902 if (rld[i].reg_rtx != 0 && rld[i].in)
6903 {
6904 int regno1 = REGNO (rld[i].reg_rtx);
6905 int nregs1 = hard_regno_nregs[regno1]
6906 [GET_MODE (rld[i].reg_rtx)];
6907 if (regno1 < valueno + valuenregs
6908 && regno1 + nregs1 > valueno)
6909 return 0;
6910 }
6911 }
6913 if (goal_mem)
6914 /* We must treat frame pointer as varying here,
6915 since it can vary--in a nonlocal goto as generated by expand_goto. */
6916 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6918 /* Now verify that the values of GOAL and VALUE remain unaltered
6919 until INSN is reached. */
6921 p = insn;
6922 while (1)
6923 {
6924 p = PREV_INSN (p);
6925 if (p == where)
6926 return value;
6928 /* Don't trust the conversion past a function call
6929 if either of the two is in a call-clobbered register, or memory. */
6930 if (CALL_P (p))
6931 {
6932 int i;
6934 if (goal_mem || need_stable_sp)
6935 return 0;
6937 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6938 for (i = 0; i < nregs; ++i)
6939 if (call_used_regs[regno + i]
6940 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6941 return 0;
6943 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6944 for (i = 0; i < valuenregs; ++i)
6945 if (call_used_regs[valueno + i]
6946 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6947 return 0;
6948 }
6950 if (INSN_P (p))
6951 {
6952 pat = PATTERN (p);
6954 /* Watch out for unspec_volatile, and volatile asms. */
6955 if (volatile_insn_p (pat))
6956 return 0;
6958 /* If this insn P stores in either GOAL or VALUE, return 0.
6959 If GOAL is a memory ref and this insn writes memory, return 0.
6960 If GOAL is a memory ref and its address is not constant,
6961 and this insn P changes a register used in GOAL, return 0. */
6963 if (GET_CODE (pat) == COND_EXEC)
6964 pat = COND_EXEC_CODE (pat);
6965 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6966 {
6967 rtx dest = SET_DEST (pat);
6968 while (GET_CODE (dest) == SUBREG
6969 || GET_CODE (dest) == ZERO_EXTRACT
6970 || GET_CODE (dest) == STRICT_LOW_PART)
6971 dest = XEXP (dest, 0);
6972 if (REG_P (dest))
6973 {
6974 int xregno = REGNO (dest);
6975 int xnregs;
6976 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6977 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6978 else
6979 xnregs = 1;
6980 if (xregno < regno + nregs && xregno + xnregs > regno)
6981 return 0;
6982 if (xregno < valueno + valuenregs
6983 && xregno + xnregs > valueno)
6984 return 0;
6985 if (goal_mem_addr_varies
6986 && reg_overlap_mentioned_for_reload_p (dest, goal))
6987 return 0;
6988 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6989 return 0;
6990 }
6991 else if (goal_mem && MEM_P (dest)
6992 && ! push_operand (dest, GET_MODE (dest)))
6993 return 0;
6994 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6995 && reg_equiv_memory_loc (regno) != 0)
6996 return 0;
6997 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6998 return 0;
6999 }
7000 else if (GET_CODE (pat) == PARALLEL)
7001 {
7002 int i;
7003 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7004 {
7005 rtx v1 = XVECEXP (pat, 0, i);
7006 if (GET_CODE (v1) == COND_EXEC)
7007 v1 = COND_EXEC_CODE (v1);
7008 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7009 {
7010 rtx dest = SET_DEST (v1);
7011 while (GET_CODE (dest) == SUBREG
7012 || GET_CODE (dest) == ZERO_EXTRACT
7013 || GET_CODE (dest) == STRICT_LOW_PART)
7014 dest = XEXP (dest, 0);
7015 if (REG_P (dest))
7016 {
7017 int xregno = REGNO (dest);
7018 int xnregs;
7019 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7020 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7021 else
7022 xnregs = 1;
7023 if (xregno < regno + nregs
7024 && xregno + xnregs > regno)
7025 return 0;
7026 if (xregno < valueno + valuenregs
7027 && xregno + xnregs > valueno)
7028 return 0;
7029 if (goal_mem_addr_varies
7030 && reg_overlap_mentioned_for_reload_p (dest,
7031 goal))
7032 return 0;
7033 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7034 return 0;
7035 }
7036 else if (goal_mem && MEM_P (dest)
7037 && ! push_operand (dest, GET_MODE (dest)))
7038 return 0;
7039 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7040 && reg_equiv_memory_loc (regno) != 0)
7041 return 0;
7042 else if (need_stable_sp
7043 && push_operand (dest, GET_MODE (dest)))
7044 return 0;
7045 }
7046 }
7047 }
7049 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7050 {
7051 rtx link;
7053 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7054 link = XEXP (link, 1))
7055 {
7056 pat = XEXP (link, 0);
7057 if (GET_CODE (pat) == CLOBBER)
7058 {
7059 rtx dest = SET_DEST (pat);
7061 if (REG_P (dest))
7062 {
7063 int xregno = REGNO (dest);
7064 int xnregs
7065 = hard_regno_nregs[xregno][GET_MODE (dest)];
7067 if (xregno < regno + nregs
7068 && xregno + xnregs > regno)
7069 return 0;
7070 else if (xregno < valueno + valuenregs
7071 && xregno + xnregs > valueno)
7072 return 0;
7073 else if (goal_mem_addr_varies
7074 && reg_overlap_mentioned_for_reload_p (dest,
7075 goal))
7076 return 0;
7077 }
7079 else if (goal_mem && MEM_P (dest)
7080 && ! push_operand (dest, GET_MODE (dest)))
7081 return 0;
7082 else if (need_stable_sp
7083 && push_operand (dest, GET_MODE (dest)))
7084 return 0;
7085 }
7086 }
7087 }
7089 #if AUTO_INC_DEC
7090 /* If this insn auto-increments or auto-decrements
7091 either regno or valueno, return 0 now.
7092 If GOAL is a memory ref and its address is not constant,
7093 and this insn P increments a register used in GOAL, return 0. */
7094 {
7095 rtx link;
7097 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7098 if (REG_NOTE_KIND (link) == REG_INC
7099 && REG_P (XEXP (link, 0)))
7100 {
7101 int incno = REGNO (XEXP (link, 0));
7102 if (incno < regno + nregs && incno >= regno)
7103 return 0;
7104 if (incno < valueno + valuenregs && incno >= valueno)
7105 return 0;
7106 if (goal_mem_addr_varies
7107 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7108 goal))
7109 return 0;
7110 }
7111 }
7112 #endif
7113 }
7114 }
7115 }
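/* Illustrative sketch (editor's addition): a typical query in the style of
   the calls made from reload1.c.  Before INSN, look for a GENERAL_REGS hard
   register that already holds the SImode constant 42; OTHER = -1 means any
   register in the class, and a null RELOAD_REG_P places no restriction on
   reload registers.  The wrapper name is hypothetical.  */
#if 0
static rtx
example_find_equiv (rtx_insn *insn)
{
  rtx goal = GEN_INT (42);
  rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                              (short *) 0, 0, SImode);

  /* EQUIV is a hard REG rtx still holding the value at INSN, or 0.  */
  return equiv;
}
#endif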
7117 /* Find a place where INCED appears in an increment or decrement operator
7118 within X, and return the amount INCED is incremented or decremented by.
7119 The value is always positive. */
7121 static int
7122 find_inc_amount (rtx x, rtx inced)
7123 {
7124 enum rtx_code code = GET_CODE (x);
7125 const char *fmt;
7126 int i;
7128 if (code == MEM)
7129 {
7130 rtx addr = XEXP (x, 0);
7131 if ((GET_CODE (addr) == PRE_DEC
7132 || GET_CODE (addr) == POST_DEC
7133 || GET_CODE (addr) == PRE_INC
7134 || GET_CODE (addr) == POST_INC)
7135 && XEXP (addr, 0) == inced)
7136 return GET_MODE_SIZE (GET_MODE (x));
7137 else if ((GET_CODE (addr) == PRE_MODIFY
7138 || GET_CODE (addr) == POST_MODIFY)
7139 && GET_CODE (XEXP (addr, 1)) == PLUS
7140 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7141 && XEXP (addr, 0) == inced
7142 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7143 {
7144 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7145 return i < 0 ? -i : i;
7146 }
7147 }
7149 fmt = GET_RTX_FORMAT (code);
7150 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7151 {
7152 if (fmt[i] == 'e')
7153 {
7154 int tem = find_inc_amount (XEXP (x, i), inced);
7155 if (tem != 0)
7156 return tem;
7157 }
7158 if (fmt[i] == 'E')
7159 {
7160 int j;
7161 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7162 {
7163 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7164 if (tem != 0)
7165 return tem;
7166 }
7167 }
7168 }
7170 return 0;
7171 }
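/* Illustrative sketch (editor's addition): the two address forms handled
   above.  A plain POST_INC counts as the size of the accessed mode, while a
   PRE_MODIFY counts as the absolute value of its constant term.  The
   register argument and wrapper name are hypothetical.  */
#if 0
static void
example_inc_amounts (rtx addr_reg)
{
  rtx post = gen_rtx_MEM (SImode, gen_rtx_POST_INC (Pmode, addr_reg));
  rtx modify
    = gen_rtx_MEM (DImode,
                   gen_rtx_PRE_MODIFY (Pmode, addr_reg,
                                       gen_rtx_PLUS (Pmode, addr_reg,
                                                     GEN_INT (-16))));

  /* Size of the accessed SImode datum, i.e. 4.  */
  gcc_assert (find_inc_amount (post, addr_reg)
              == (int) GET_MODE_SIZE (SImode));

  /* Absolute value of the constant in the PRE_MODIFY.  */
  gcc_assert (find_inc_amount (modify, addr_reg) == 16);
}
#endif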
7173 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7174 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7176 static int
7177 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7178 rtx insn)
7179 {
7180 rtx link;
7182 if (!AUTO_INC_DEC)
7183 return 0;
7185 gcc_assert (insn);
7187 if (! INSN_P (insn))
7188 return 0;
7190 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7191 if (REG_NOTE_KIND (link) == REG_INC)
7192 {
7193 unsigned int test = (int) REGNO (XEXP (link, 0));
7194 if (test >= regno && test < endregno)
7195 return 1;
7196 }
7197 return 0;
7198 }
7200 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7201 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7202 REG_INC. REGNO must refer to a hard register. */
7204 int
7205 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7206 int sets)
7207 {
7208 unsigned int nregs, endregno;
7210 /* regno must be a hard register. */
7211 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7213 nregs = hard_regno_nregs[regno][mode];
7214 endregno = regno + nregs;
7216 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7217 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7218 && REG_P (XEXP (PATTERN (insn), 0)))
7219 {
7220 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7222 return test >= regno && test < endregno;
7223 }
7225 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7226 return 1;
7228 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7229 {
7230 int i = XVECLEN (PATTERN (insn), 0) - 1;
7232 for (; i >= 0; i--)
7233 {
7234 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7235 if ((GET_CODE (elt) == CLOBBER
7236 || (sets == 1 && GET_CODE (elt) == SET))
7237 && REG_P (XEXP (elt, 0)))
7238 {
7239 unsigned int test = REGNO (XEXP (elt, 0));
7241 if (test >= regno && test < endregno)
7242 return 1;
7243 }
7244 if (sets == 2
7245 && reg_inc_found_and_valid_p (regno, endregno, elt))
7246 return 1;
7247 }
7248 }
7250 return 0;
7251 }
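/* Illustrative sketch (editor's addition): the kind of question find_reloads
   asks with this predicate.  SETS = 0 looks only at CLOBBERs, SETS = 1 also
   at a top-level SET, and SETS = 2 additionally honours REG_INC notes.  The
   wrapper name is hypothetical.  */
#if 0
static bool
example_operand_clobbered_p (rtx operand, rtx_insn *insn)
{
  if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
    return false;

  return regno_clobbered_p (REGNO (operand), insn,
                            GET_MODE (operand), /*sets=*/2) != 0;
}
#endif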
7253 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7254 rtx
7255 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7256 {
7257 int regno;
7259 if (GET_MODE (reloadreg) == mode)
7260 return reloadreg;
7262 regno = REGNO (reloadreg);
7264 if (REG_WORDS_BIG_ENDIAN)
7265 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7266 - (int) hard_regno_nregs[regno][mode];
7268 return gen_rtx_REG (mode, regno);
7269 }
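/* Illustrative sketch (editor's addition): assuming a 32-bit-word target
   where DImode occupies two hard registers, narrowing a reload register
   pair to SImode picks the low-order word.  With REG_WORDS_BIG_ENDIAN that
   word lives in the higher-numbered register.  The register number 8 is
   made up for the example.  */
#if 0
static void
example_adjust_for_mode (void)
{
  rtx pair = gen_rtx_REG (DImode, 8);
  rtx low = reload_adjust_reg_for_mode (pair, SImode);

  gcc_assert (REGNO (low) == (REG_WORDS_BIG_ENDIAN ? 9 : 8));
}
#endif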
7271 static const char *const reload_when_needed_name[] =
7272 {
7273 "RELOAD_FOR_INPUT",
7274 "RELOAD_FOR_OUTPUT",
7275 "RELOAD_FOR_INSN",
7276 "RELOAD_FOR_INPUT_ADDRESS",
7277 "RELOAD_FOR_INPADDR_ADDRESS",
7278 "RELOAD_FOR_OUTPUT_ADDRESS",
7279 "RELOAD_FOR_OUTADDR_ADDRESS",
7280 "RELOAD_FOR_OPERAND_ADDRESS",
7281 "RELOAD_FOR_OPADDR_ADDR",
7282 "RELOAD_OTHER",
7283 "RELOAD_FOR_OTHER_ADDRESS"
7286 /* These functions are used to print the variables set by 'find_reloads' */
7288 DEBUG_FUNCTION void
7289 debug_reload_to_stream (FILE *f)
7290 {
7291 int r;
7292 const char *prefix;
7294 if (! f)
7295 f = stderr;
7296 for (r = 0; r < n_reloads; r++)
7297 {
7298 fprintf (f, "Reload %d: ", r);
7300 if (rld[r].in != 0)
7301 {
7302 fprintf (f, "reload_in (%s) = ",
7303 GET_MODE_NAME (rld[r].inmode));
7304 print_inline_rtx (f, rld[r].in, 24);
7305 fprintf (f, "\n\t");
7306 }
7308 if (rld[r].out != 0)
7309 {
7310 fprintf (f, "reload_out (%s) = ",
7311 GET_MODE_NAME (rld[r].outmode));
7312 print_inline_rtx (f, rld[r].out, 24);
7313 fprintf (f, "\n\t");
7314 }
7316 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7318 fprintf (f, "%s (opnum = %d)",
7319 reload_when_needed_name[(int) rld[r].when_needed],
7320 rld[r].opnum);
7322 if (rld[r].optional)
7323 fprintf (f, ", optional");
7325 if (rld[r].nongroup)
7326 fprintf (f, ", nongroup");
7328 if (rld[r].inc != 0)
7329 fprintf (f, ", inc by %d", rld[r].inc);
7331 if (rld[r].nocombine)
7332 fprintf (f, ", can't combine");
7334 if (rld[r].secondary_p)
7335 fprintf (f, ", secondary_reload_p");
7337 if (rld[r].in_reg != 0)
7338 {
7339 fprintf (f, "\n\treload_in_reg: ");
7340 print_inline_rtx (f, rld[r].in_reg, 24);
7341 }
7343 if (rld[r].out_reg != 0)
7344 {
7345 fprintf (f, "\n\treload_out_reg: ");
7346 print_inline_rtx (f, rld[r].out_reg, 24);
7347 }
7349 if (rld[r].reg_rtx != 0)
7350 {
7351 fprintf (f, "\n\treload_reg_rtx: ");
7352 print_inline_rtx (f, rld[r].reg_rtx, 24);
7353 }
7355 prefix = "\n\t";
7356 if (rld[r].secondary_in_reload != -1)
7357 {
7358 fprintf (f, "%ssecondary_in_reload = %d",
7359 prefix, rld[r].secondary_in_reload);
7360 prefix = ", ";
7361 }
7363 if (rld[r].secondary_out_reload != -1)
7364 fprintf (f, "%ssecondary_out_reload = %d\n",
7365 prefix, rld[r].secondary_out_reload);
7367 prefix = "\n\t";
7368 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7369 {
7370 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7371 insn_data[rld[r].secondary_in_icode].name);
7372 prefix = ", ";
7373 }
7375 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7376 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7377 insn_data[rld[r].secondary_out_icode].name);
7379 fprintf (f, "\n");
7380 }
7381 }
7383 DEBUG_FUNCTION void
7384 debug_reload (void)
7385 {
7386 debug_reload_to_stream (stderr);
7387 }
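/* Illustrative usage (editor's addition): both functions are debugger entry
   points, e.g. "call debug_reload ()" from gdb after find_reloads has filled
   in rld[].  The same dump can be routed into a pass dump file, as sketched
   below; the helper name is hypothetical and assumes dump_file is the usual
   global from dumpfile.h.  */
#if 0
static void
example_dump_reloads (void)
{
  if (dump_file)
    debug_reload_to_stream (dump_file);
}
#endif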