[31/77] Use scalar_int_mode for move2add
[official-gcc.git] / gcc / reload.c
blob 2116332b55aa33babd9ab08023026592f2a471c4
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
56 NOTE SIDE EFFECTS:
58 find_reloads can alter the operands of the instruction it is called on.
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
72 Using a reload register for several reloads in one insn:
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
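/* A rough sketch of that sequence (the real driver lives in reload1.c;
   the argument values REPLACE, IND_LEVELS, LIVE_KNOWN and RELOAD_REG_P
   below stand for whatever the caller has computed):

	init_reload ();
	for each insn:
	  find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	  ... choose a hard reg for each rld[r], set rld[r].reg_rtx and emit
	      the load/store insns around the original insn ...
	  subst_reloads (insn);
 */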
86 #define REG_OK_STRICT
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
110 /* True if X is a constant that can be forced into the constant pool.
111 MODE is the mode of the operand, or VOIDmode if not known. */
112 #define CONST_POOL_OK_P(MODE, X) \
113 ((MODE) != VOIDmode \
114 && CONSTANT_P (X) \
115 && GET_CODE (X) != HIGH \
116 && !targetm.cannot_force_const_mem (MODE, X))
118 /* True if C is a non-empty register class that has too few registers
119 to be safely used as a reload target class. */
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
130 /* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
135 /* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
140 int reload_n_operands;
142 /* Replacing reloads.
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
149 /* Nonzero means record the places to replace. */
150 static int replace_reloads;
152 /* Each replacement is recorded with a structure like this. */
153 struct replacement
155 rtx *where; /* Location to store in */
156 int what; /* which reload this is for */
157 machine_mode mode; /* mode it must have */
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
162 /* Number of replacements currently recorded. */
163 static int n_replacements;
165 /* Used to track what is modified by an operand. */
166 struct decomposition
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 HOST_WIDE_INT start; /* Starting offset or register number. */
172 HOST_WIDE_INT end; /* Ending offset or register number. */
175 #ifdef SECONDARY_MEMORY_NEEDED
177 /* Save MEMs needed to copy from one class of registers to another. One MEM
178 is used per mode, but normally only one or two modes are ever used.
180 We keep two versions, before and after register elimination. The one
181 after register elimination is recorded separately for each operand. This
182 is done in case the address is not valid to be sure that we separately
183 reload each. */
185 static rtx secondary_memlocs[NUM_MACHINE_MODES];
186 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
187 static int secondary_memlocs_elim_used = 0;
188 #endif
190 /* The instruction we are doing reloads for;
191 so we can test whether a register dies in it. */
192 static rtx_insn *this_insn;
194 /* Nonzero if this instruction is a user-specified asm with operands. */
195 static int this_insn_is_asm;
197 /* If hard_regs_live_known is nonzero,
198 we can tell which hard regs are currently live,
199 at least enough to succeed in choosing dummy reloads. */
200 static int hard_regs_live_known;
202 /* Indexed by hard reg number,
203 element is nonnegative if hard reg has been spilled.
204 This vector is passed to `find_reloads' as an argument
205 and is not changed here. */
206 static short *static_reload_reg_p;
208 /* Set to 1 in subst_reg_equivs if it changes anything. */
209 static int subst_reg_equivs_changed;
211 /* On return from push_reload, holds the reload-number for the OUT
212 operand, which can be different from the one for the input operand. */
213 static int output_reloadnum;
215 /* Compare two RTX's. */
216 #define MATCHES(x, y) \
217 (x == y || (x != 0 && (REG_P (x) \
218 ? REG_P (y) && REGNO (x) == REGNO (y) \
219 : rtx_equal_p (x, y) && ! side_effects_p (x))))
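/* For instance, two REGs match whenever their register numbers are equal,
   even if their modes differ, while two identical MEMs match only when
   they have no side effects such as auto-increment.  */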
221 /* Indicates if two reload purposes are for similar enough things that we
222 can merge their reloads. */
223 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
224 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
225 || ((when1) == (when2) && (op1) == (op2)) \
226 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
227 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
228 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
229 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
230 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
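/* So two RELOAD_FOR_INPUT reloads (likewise two RELOAD_FOR_OPERAND_ADDRESS
   or two RELOAD_FOR_OTHER_ADDRESS reloads) can always be merged, whereas
   e.g. a RELOAD_FOR_INPUT_ADDRESS reload merges only with a RELOAD_OTHER
   reload or with one of the same type for the same operand.  */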
232 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
233 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
234 ((when1) != (when2) \
235 || ! ((op1) == (op2) \
236 || (when1) == RELOAD_FOR_INPUT \
237 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
238 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
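/* That is, merging reloads of different types, or of the same type for
   different operands (other than the always-mergeable types above),
   demotes the result to RELOAD_OTHER.  */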
240 /* If we are going to reload an address, compute the reload type to
241 use. */
242 #define ADDR_TYPE(type) \
243 ((type) == RELOAD_FOR_INPUT_ADDRESS \
244 ? RELOAD_FOR_INPADDR_ADDRESS \
245 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
246 ? RELOAD_FOR_OUTADDR_ADDRESS \
247 : (type)))
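/* E.g. ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) is RELOAD_FOR_INPADDR_ADDRESS;
   every other reload type, including the *ADDR_ADDRESS types themselves,
   maps to itself.  */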
249 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
250 machine_mode, enum reload_type,
251 enum insn_code *, secondary_reload_info *);
252 static enum reg_class find_valid_class (machine_mode, machine_mode,
253 int, unsigned int);
254 static void push_replacement (rtx *, int, machine_mode);
255 static void dup_replacements (rtx *, rtx *);
256 static void combine_reloads (void);
257 static int find_reusable_reload (rtx *, rtx, enum reg_class,
258 enum reload_type, int, int);
259 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
260 machine_mode, reg_class_t, int, int);
261 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
262 static struct decomposition decompose (rtx);
263 static int immune_p (rtx, rtx, struct decomposition);
264 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
265 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
266 rtx_insn *, int *);
267 static rtx make_memloc (rtx, int);
268 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
269 addr_space_t, rtx *);
270 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
271 int, enum reload_type, int, rtx_insn *);
272 static rtx subst_reg_equivs (rtx, rtx_insn *);
273 static rtx subst_indexed_address (rtx);
274 static void update_auto_inc_notes (rtx_insn *, int, int);
275 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
276 enum rtx_code, enum rtx_code, rtx *,
277 int, enum reload_type,int, rtx_insn *);
278 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
279 machine_mode, int,
280 enum reload_type, int);
281 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
282 int, rtx_insn *, int *);
283 static void copy_replacements_1 (rtx *, rtx *, int);
284 static int find_inc_amount (rtx, rtx);
285 static int refers_to_mem_for_reload_p (rtx);
286 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
287 rtx, rtx *);
289 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
290 list yet. */
292 static void
293 push_reg_equiv_alt_mem (int regno, rtx mem)
295 rtx it;
297 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
298 if (rtx_equal_p (XEXP (it, 0), mem))
299 return;
301 reg_equiv_alt_mem_list (regno)
302 = alloc_EXPR_LIST (REG_EQUIV, mem,
303 reg_equiv_alt_mem_list (regno));
306 /* Determine if any secondary reloads are needed for loading (if IN_P is
307 nonzero) or storing (if IN_P is zero) X to or from a reload register of
308 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
309 are needed, push them.
311 Return the reload number of the secondary reload we made, or -1 if
312 we didn't need one. *PICODE is set to the insn_code to use if we do
313 need a secondary reload. */
315 static int
316 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
317 enum reg_class reload_class,
318 machine_mode reload_mode, enum reload_type type,
319 enum insn_code *picode, secondary_reload_info *prev_sri)
321 enum reg_class rclass = NO_REGS;
322 enum reg_class scratch_class;
323 machine_mode mode = reload_mode;
324 enum insn_code icode = CODE_FOR_nothing;
325 enum insn_code t_icode = CODE_FOR_nothing;
326 enum reload_type secondary_type;
327 int s_reload, t_reload = -1;
328 const char *scratch_constraint;
329 secondary_reload_info sri;
331 if (type == RELOAD_FOR_INPUT_ADDRESS
332 || type == RELOAD_FOR_OUTPUT_ADDRESS
333 || type == RELOAD_FOR_INPADDR_ADDRESS
334 || type == RELOAD_FOR_OUTADDR_ADDRESS)
335 secondary_type = type;
336 else
337 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
339 *picode = CODE_FOR_nothing;
341 /* If X is a paradoxical SUBREG, use the inner value to determine both the
342 mode and object being reloaded. */
343 if (paradoxical_subreg_p (x))
345 x = SUBREG_REG (x);
346 reload_mode = GET_MODE (x);
349 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
350 is still a pseudo-register by now, it *must* have an equivalent MEM
351 but we don't want to assume that), use that equivalent when seeing if
352 a secondary reload is needed since whether or not a reload is needed
353 might be sensitive to the form of the MEM. */
355 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
356 && reg_equiv_mem (REGNO (x)))
357 x = reg_equiv_mem (REGNO (x));
359 sri.icode = CODE_FOR_nothing;
360 sri.prev_sri = prev_sri;
361 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
362 reload_mode, &sri);
363 icode = (enum insn_code) sri.icode;
365 /* If we don't need any secondary registers, done. */
366 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
367 return -1;
369 if (rclass != NO_REGS)
370 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
371 reload_mode, type, &t_icode, &sri);
373 /* If we will be using an insn, the secondary reload is for a
374 scratch register. */
376 if (icode != CODE_FOR_nothing)
378 /* If IN_P is nonzero, the reload register will be the output in
379 operand 0. If IN_P is zero, the reload register will be the input
380 in operand 1. Outputs should have an initial "=", which we must
381 skip. */
383 /* ??? It would be useful to be able to handle only two, or more than
384 three, operands, but for now we can only handle the case of having
385 exactly three: output, input and one temp/scratch. */
386 gcc_assert (insn_data[(int) icode].n_operands == 3);
388 /* ??? We currently have no way to represent a reload that needs
389 an icode to reload from an intermediate tertiary reload register.
390 We should probably have a new field in struct reload to tag a
391 chain of scratch operand reloads onto. */
392 gcc_assert (rclass == NO_REGS);
394 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
395 gcc_assert (*scratch_constraint == '=');
396 scratch_constraint++;
397 if (*scratch_constraint == '&')
398 scratch_constraint++;
399 scratch_class = (reg_class_for_constraint
400 (lookup_constraint (scratch_constraint)));
402 rclass = scratch_class;
403 mode = insn_data[(int) icode].operand[2].mode;
406 /* This case isn't valid, so fail. Reload is allowed to use the same
407 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
408 in the case of a secondary register, we actually need two different
409 registers for correct code. We fail here to prevent the possibility of
410 silently generating incorrect code later.
412 The convention is that secondary input reloads are valid only if the
413 secondary_class is different from class. If you have such a case, you
414 can not use secondary reloads, you must work around the problem some
415 other way.
417 Allow this when a reload_in/out pattern is being used. I.e. assume
418 that the generated code handles this case. */
420 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
421 || t_icode != CODE_FOR_nothing);
423 /* See if we can reuse an existing secondary reload. */
424 for (s_reload = 0; s_reload < n_reloads; s_reload++)
425 if (rld[s_reload].secondary_p
426 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
427 || reg_class_subset_p (rld[s_reload].rclass, rclass))
428 && ((in_p && rld[s_reload].inmode == mode)
429 || (! in_p && rld[s_reload].outmode == mode))
430 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
431 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
432 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
433 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
434 && (small_register_class_p (rclass)
435 || targetm.small_register_classes_for_mode_p (VOIDmode))
436 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
437 opnum, rld[s_reload].opnum))
439 if (in_p)
440 rld[s_reload].inmode = mode;
441 if (! in_p)
442 rld[s_reload].outmode = mode;
444 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
445 rld[s_reload].rclass = rclass;
447 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
448 rld[s_reload].optional &= optional;
449 rld[s_reload].secondary_p = 1;
450 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
451 opnum, rld[s_reload].opnum))
452 rld[s_reload].when_needed = RELOAD_OTHER;
454 break;
457 if (s_reload == n_reloads)
459 #ifdef SECONDARY_MEMORY_NEEDED
460 /* If we need a memory location to copy between the two reload regs,
461 set it up now. Note that we do the input case before making
462 the reload and the output case after. This is due to the
463 way reloads are output. */
465 if (in_p && icode == CODE_FOR_nothing
466 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
468 get_secondary_mem (x, reload_mode, opnum, type);
470 /* We may have just added new reloads. Make sure we add
471 the new reload at the end. */
472 s_reload = n_reloads;
474 #endif
476 /* We need to make a new secondary reload for this register class. */
477 rld[s_reload].in = rld[s_reload].out = 0;
478 rld[s_reload].rclass = rclass;
480 rld[s_reload].inmode = in_p ? mode : VOIDmode;
481 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
482 rld[s_reload].reg_rtx = 0;
483 rld[s_reload].optional = optional;
484 rld[s_reload].inc = 0;
485 /* Maybe we could combine these, but it seems too tricky. */
486 rld[s_reload].nocombine = 1;
487 rld[s_reload].in_reg = 0;
488 rld[s_reload].out_reg = 0;
489 rld[s_reload].opnum = opnum;
490 rld[s_reload].when_needed = secondary_type;
491 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
492 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
493 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
494 rld[s_reload].secondary_out_icode
495 = ! in_p ? t_icode : CODE_FOR_nothing;
496 rld[s_reload].secondary_p = 1;
498 n_reloads++;
500 #ifdef SECONDARY_MEMORY_NEEDED
501 if (! in_p && icode == CODE_FOR_nothing
502 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
503 get_secondary_mem (x, mode, opnum, type);
504 #endif
507 *picode = icode;
508 return s_reload;
511 /* If a secondary reload is needed, return its class. If both an intermediate
512 register and a scratch register are needed, we return the class of the
513 intermediate register. */
514 reg_class_t
515 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
516 rtx x)
518 enum insn_code icode;
519 secondary_reload_info sri;
521 sri.icode = CODE_FOR_nothing;
522 sri.prev_sri = NULL;
523 rclass
524 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
525 icode = (enum insn_code) sri.icode;
527 /* If there are no secondary reloads at all, we return NO_REGS.
528 If an intermediate register is needed, we return its class. */
529 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
530 return rclass;
532 /* No intermediate register is needed, but we have a special reload
533 pattern, which we assume for now needs a scratch register. */
534 return scratch_reload_class (icode);
537 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
538 three operands, verify that operand 2 is an output operand, and return
539 its register class.
540 ??? We'd like to be able to handle any pattern with at least 2 operands,
541 for zero or more scratch registers, but that needs more infrastructure. */
542 enum reg_class
543 scratch_reload_class (enum insn_code icode)
545 const char *scratch_constraint;
546 enum reg_class rclass;
548 gcc_assert (insn_data[(int) icode].n_operands == 3);
549 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
550 gcc_assert (*scratch_constraint == '=');
551 scratch_constraint++;
552 if (*scratch_constraint == '&')
553 scratch_constraint++;
554 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
555 gcc_assert (rclass != NO_REGS);
556 return rclass;
559 #ifdef SECONDARY_MEMORY_NEEDED
561 /* Return a memory location that will be used to copy X in mode MODE.
562 If we haven't already made a location for this mode in this insn,
563 call find_reloads_address on the location being returned. */
565 rtx
566 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
567 int opnum, enum reload_type type)
569 rtx loc;
570 int mem_valid;
572 /* By default, if MODE is narrower than a word, widen it to a word.
573 This is required because most machines that require these memory
574 locations do not support short loads and stores from all registers
575 (e.g., FP registers). */
577 #ifdef SECONDARY_MEMORY_NEEDED_MODE
578 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
579 #else
580 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
581 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
582 #endif
584 /* If we already have made a MEM for this operand in MODE, return it. */
585 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
586 return secondary_memlocs_elim[(int) mode][opnum];
588 /* If this is the first time we've tried to get a MEM for this mode,
589 allocate a new one. `something_changed' in reload will get set
590 by noticing that the frame size has changed. */
592 if (secondary_memlocs[(int) mode] == 0)
594 #ifdef SECONDARY_MEMORY_NEEDED_RTX
595 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
596 #else
597 secondary_memlocs[(int) mode]
598 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
599 #endif
602 /* Get a version of the address doing any eliminations needed. If that
603 didn't give us a new MEM, make a new one if it isn't valid. */
605 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
606 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
607 MEM_ADDR_SPACE (loc));
609 if (! mem_valid && loc == secondary_memlocs[(int) mode])
610 loc = copy_rtx (loc);
612 /* The only time the call below will do anything is if the stack
613 offset is too large. In that case IND_LEVELS doesn't matter, so we
614 can just pass a zero. Adjust the type to be the address of the
615 corresponding object. If the address was valid, save the eliminated
616 address. If it wasn't valid, we need to make a reload each time, so
617 don't save it. */
619 if (! mem_valid)
621 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
622 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
623 : RELOAD_OTHER);
625 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
626 opnum, type, 0, 0);
629 secondary_memlocs_elim[(int) mode][opnum] = loc;
630 if (secondary_memlocs_elim_used <= (int)mode)
631 secondary_memlocs_elim_used = (int)mode + 1;
632 return loc;
635 /* Clear any secondary memory locations we've made. */
637 void
638 clear_secondary_mem (void)
640 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
642 #endif /* SECONDARY_MEMORY_NEEDED */
645 /* Find the largest class which has at least one register valid in
646 mode INNER, and which for every such register, that register number
647 plus N is also valid in OUTER (if in range) and is cheap to move
648 into REGNO. Such a class must exist. */
650 static enum reg_class
651 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
652 machine_mode inner ATTRIBUTE_UNUSED, int n,
653 unsigned int dest_regno ATTRIBUTE_UNUSED)
655 int best_cost = -1;
656 int rclass;
657 int regno;
658 enum reg_class best_class = NO_REGS;
659 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
660 unsigned int best_size = 0;
661 int cost;
663 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
665 int bad = 0;
666 int good = 0;
667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
668 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
670 if (HARD_REGNO_MODE_OK (regno, inner))
672 good = 1;
673 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
674 && ! HARD_REGNO_MODE_OK (regno + n, outer))
675 bad = 1;
679 if (bad || !good)
680 continue;
681 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
683 if ((reg_class_size[rclass] > best_size
684 && (best_cost < 0 || best_cost >= cost))
685 || best_cost > cost)
687 best_class = (enum reg_class) rclass;
688 best_size = reg_class_size[rclass];
689 best_cost = register_move_cost (outer, (enum reg_class) rclass,
690 dest_class);
694 gcc_assert (best_size != 0);
696 return best_class;
699 /* We are trying to reload a subreg of something that is not a register.
700 Find the largest class which contains only registers valid in
701 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
702 which we would eventually like to obtain the object. */
704 static enum reg_class
705 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
706 machine_mode mode ATTRIBUTE_UNUSED,
707 enum reg_class dest_class ATTRIBUTE_UNUSED)
709 int best_cost = -1;
710 int rclass;
711 int regno;
712 enum reg_class best_class = NO_REGS;
713 unsigned int best_size = 0;
714 int cost;
716 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
718 unsigned int computed_rclass_size = 0;
720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
722 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
723 && (HARD_REGNO_MODE_OK (regno, mode)))
724 computed_rclass_size++;
727 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
729 if ((computed_rclass_size > best_size
730 && (best_cost < 0 || best_cost >= cost))
731 || best_cost > cost)
733 best_class = (enum reg_class) rclass;
734 best_size = computed_rclass_size;
735 best_cost = register_move_cost (outer, (enum reg_class) rclass,
736 dest_class);
740 gcc_assert (best_size != 0);
742 #ifdef LIMIT_RELOAD_CLASS
743 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
744 #endif
745 return best_class;
748 /* Return the number of a previously made reload that can be combined with
749 a new one, or n_reloads if none of the existing reloads can be used.
750 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
751 push_reload, they determine the kind of the new reload that we try to
752 combine. P_IN points to the corresponding value of IN, which can be
753 modified by this function.
754 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
756 static int
757 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
758 enum reload_type type, int opnum, int dont_share)
760 rtx in = *p_in;
761 int i;
762 /* We can't merge two reloads if the output of either one is
763 earlyclobbered. */
765 if (earlyclobber_operand_p (out))
766 return n_reloads;
768 /* We can use an existing reload if the class is right
769 and at least one of IN and OUT is a match
770 and the other is at worst neutral.
771 (A zero compared against anything is neutral.)
773 For targets with small register classes, don't use existing reloads
774 unless they are for the same thing since that can cause us to need
775 more reload registers than we otherwise would. */
777 for (i = 0; i < n_reloads; i++)
778 if ((reg_class_subset_p (rclass, rld[i].rclass)
779 || reg_class_subset_p (rld[i].rclass, rclass))
780 /* If the existing reload has a register, it must fit our class. */
781 && (rld[i].reg_rtx == 0
782 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
783 true_regnum (rld[i].reg_rtx)))
784 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
785 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
786 || (out != 0 && MATCHES (rld[i].out, out)
787 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
788 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
789 && (small_register_class_p (rclass)
790 || targetm.small_register_classes_for_mode_p (VOIDmode))
791 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
792 return i;
794 /* Reloading a plain reg for input can match a reload to postincrement
795 that reg, since the postincrement's value is the right value.
796 Likewise, it can match a preincrement reload, since we regard
797 the preincrementation as happening before any ref in this insn
798 to that register. */
799 for (i = 0; i < n_reloads; i++)
800 if ((reg_class_subset_p (rclass, rld[i].rclass)
801 || reg_class_subset_p (rld[i].rclass, rclass))
802 /* If the existing reload has a register, it must fit our
803 class. */
804 && (rld[i].reg_rtx == 0
805 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
806 true_regnum (rld[i].reg_rtx)))
807 && out == 0 && rld[i].out == 0 && rld[i].in != 0
808 && ((REG_P (in)
809 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
810 && MATCHES (XEXP (rld[i].in, 0), in))
811 || (REG_P (rld[i].in)
812 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
813 && MATCHES (XEXP (in, 0), rld[i].in)))
814 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
815 && (small_register_class_p (rclass)
816 || targetm.small_register_classes_for_mode_p (VOIDmode))
817 && MERGABLE_RELOADS (type, rld[i].when_needed,
818 opnum, rld[i].opnum))
820 /* Make sure reload_in ultimately has the increment,
821 not the plain register. */
822 if (REG_P (in))
823 *p_in = rld[i].in;
824 return i;
826 return n_reloads;
829 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
830 expression. MODE is the mode that X will be used in. OUTPUT is true if
831 the function is invoked for the output part of an enclosing reload. */
833 static bool
834 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
836 rtx inner;
838 /* Only SUBREGs are problematical. */
839 if (GET_CODE (x) != SUBREG)
840 return false;
842 inner = SUBREG_REG (x);
844 /* If INNER is a constant or PLUS, then INNER will need reloading. */
845 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
846 return true;
848 /* If INNER is not a hard register, then INNER will not need reloading. */
849 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
850 return false;
852 /* If INNER is not ok for MODE, then INNER will need reloading. */
853 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
854 return true;
856 /* If this is for an output, and the outer part is a word or smaller,
857 INNER is larger than a word and the number of registers in INNER is
858 not the same as the number of words in INNER, then INNER will need
859 reloading (with an in-out reload). */
860 return (output
861 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
862 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
863 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
864 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
867 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868 requiring an extra reload register. The caller has already found that
869 IN contains some reference to REGNO, so check that we can produce the
870 new value in a single step. E.g. if we have
871 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872 instruction that adds one to a register, this should succeed.
873 However, if we have something like
874 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875 needs to be loaded into a register first, we need a separate reload
876 register.
877 Such PLUS reloads are generated by find_reloads_address_part.
878 The out-of-range PLUS expressions are usually introduced in the instruction
879 patterns by register elimination and substituting pseudos without a home
880 by their function-invariant equivalences. */
881 static int
882 can_reload_into (rtx in, int regno, machine_mode mode)
884 rtx dst;
885 rtx_insn *test_insn;
886 int r = 0;
887 struct recog_data_d save_recog_data;
889 /* For matching constraints, we often get notional input reloads where
890 we want to use the original register as the reload register. I.e.
891 technically this is a non-optional input-output reload, but IN is
892 already a valid register, and has been chosen as the reload register.
893 Speed this up, since it trivially works. */
894 if (REG_P (in))
895 return 1;
897 /* To test MEMs properly, we'd have to take into account all the reloads
898 that are already scheduled, which can become quite complicated.
899 And since we've already handled address reloads for this MEM, it
900 should always succeed anyway. */
901 if (MEM_P (in))
902 return 1;
904 /* If we can make a simple SET insn that does the job, everything should
905 be fine. */
906 dst = gen_rtx_REG (mode, regno);
907 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908 save_recog_data = recog_data;
909 if (recog_memoized (test_insn) >= 0)
911 extract_insn (test_insn);
912 r = constrain_operands (1, get_enabled_alternatives (test_insn));
914 recog_data = save_recog_data;
915 return r;
918 /* Record one reload that needs to be performed.
919 IN is an rtx saying where the data are to be found before this instruction.
920 OUT says where they must be stored after the instruction.
921 (IN is zero for data not read, and OUT is zero for data not written.)
922 INLOC and OUTLOC point to the places in the instructions where
923 IN and OUT were found.
924 If IN and OUT are both nonzero, it means the same register must be used
925 to reload both IN and OUT.
927 RCLASS is a register class required for the reloaded data.
928 INMODE is the machine mode that the instruction requires
929 for the reg that replaces IN and OUTMODE is likewise for OUT.
931 If IN is zero, then OUT's location and mode should be passed as
932 INLOC and INMODE.
934 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
936 OPTIONAL nonzero means this reload does not need to be performed:
937 it can be discarded if that is more convenient.
939 OPNUM and TYPE say what the purpose of this reload is.
941 The return value is the reload-number for this reload.
943 If both IN and OUT are nonzero, in some rare cases we might
944 want to make two separate reloads. (Actually we never do this now.)
945 Therefore, the reload-number for OUT is stored in
946 output_reloadnum when we return; the return value applies to IN.
947 Usually (presently always), when IN and OUT are nonzero,
948 the two reload-numbers are equal, but the caller should be careful to
949 distinguish them. */
951 int
952 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 enum reg_class rclass, machine_mode inmode,
954 machine_mode outmode, int strict_low, int optional,
955 int opnum, enum reload_type type)
957 int i;
958 int dont_share = 0;
959 int dont_remove_subreg = 0;
960 #ifdef LIMIT_RELOAD_CLASS
961 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962 #endif
963 int secondary_in_reload = -1, secondary_out_reload = -1;
964 enum insn_code secondary_in_icode = CODE_FOR_nothing;
965 enum insn_code secondary_out_icode = CODE_FOR_nothing;
966 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967 subreg_in_class = NO_REGS;
969 /* INMODE and/or OUTMODE could be VOIDmode if no mode
970 has been specified for the operand. In that case,
971 use the operand's mode as the mode to reload. */
972 if (inmode == VOIDmode && in != 0)
973 inmode = GET_MODE (in);
974 if (outmode == VOIDmode && out != 0)
975 outmode = GET_MODE (out);
977 /* If find_reloads and friends have so far failed to replace a pseudo
978 with a constant from reg_equiv_constant, something went wrong
979 beforehand.
980 Note that it can't simply be done here if we missed it earlier
981 since the constant might need to be pushed into the literal pool
982 and the resulting memref would probably need further
983 reloading. */
984 if (in != 0 && REG_P (in))
986 int regno = REGNO (in);
988 gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 || reg_renumber[regno] >= 0
990 || reg_equiv_constant (regno) == NULL_RTX);
993 /* reg_equiv_constant only contains constants which are obviously
994 not appropriate as destination. So if we would need to replace
995 the destination pseudo with a constant we are in real
996 trouble. */
997 if (out != 0 && REG_P (out))
999 int regno = REGNO (out);
1001 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 || reg_renumber[regno] >= 0
1003 || reg_equiv_constant (regno) == NULL_RTX);
1006 /* If we have a read-write operand with an address side-effect,
1007 change either IN or OUT so the side-effect happens only once. */
1008 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009 switch (GET_CODE (XEXP (in, 0)))
1011 case POST_INC: case POST_DEC: case POST_MODIFY:
1012 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 break;
1015 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 break;
1019 default:
1020 break;
1023 /* If we are reloading a (SUBREG constant ...), really reload just the
1024 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1025 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027 register is a pseudo, also reload the inside expression.
1028 For machines that extend byte loads, do this for any SUBREG of a pseudo
1029 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030 M2 is an integral mode that gets extended when loaded.
1031 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032 where either M1 is not valid for R or M2 is wider than a word but we
1033 only need one register to store an M2-sized quantity in R.
1034 (However, if OUT is nonzero, we need to reload the reg *and*
1035 the subreg, so do nothing here, and let following statement handle it.)
1037 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038 we can't handle it here because CONST_INT does not indicate a mode.
1040 Similarly, we must reload the inside expression if we have a
1041 STRICT_LOW_PART (presumably, in == out in this case).
1043 Also reload the inner expression if it does not require a secondary
1044 reload but the SUBREG does.
1046 Finally, reload the inner expression if it is a register that is in
1047 the class whose registers cannot be referenced in a different size
1048 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1049 cannot reload just the inside since we might end up with the wrong
1050 register class. But if it is inside a STRICT_LOW_PART, we have
1051 no choice, so we hope we do get the right register class there. */
1053 scalar_int_mode inner_mode;
1054 if (in != 0 && GET_CODE (in) == SUBREG
1055 && (subreg_lowpart_p (in) || strict_low)
1056 #ifdef CANNOT_CHANGE_MODE_CLASS
1057 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1058 #endif
1059 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1060 && (CONSTANT_P (SUBREG_REG (in))
1061 || GET_CODE (SUBREG_REG (in)) == PLUS
1062 || strict_low
1063 || (((REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1065 || MEM_P (SUBREG_REG (in)))
1066 && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1067 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1068 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1069 &inner_mode)
1070 && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1071 && paradoxical_subreg_p (inmode, inner_mode)
1072 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1073 || (WORD_REGISTER_OPERATIONS
1074 && (GET_MODE_PRECISION (inmode)
1075 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1076 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1077 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1078 / UNITS_PER_WORD)))))
1079 || (REG_P (SUBREG_REG (in))
1080 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1081 /* The case where out is nonzero
1082 is handled differently in the following statement. */
1083 && (out == 0 || subreg_lowpart_p (in))
1084 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1085 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1086 > UNITS_PER_WORD)
1087 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1088 / UNITS_PER_WORD)
1089 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1090 [GET_MODE (SUBREG_REG (in))]))
1091 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1092 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1093 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1094 SUBREG_REG (in))
1095 == NO_REGS))
1096 #ifdef CANNOT_CHANGE_MODE_CLASS
1097 || (REG_P (SUBREG_REG (in))
1098 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1099 && REG_CANNOT_CHANGE_MODE_P
1100 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1101 #endif
1104 #ifdef LIMIT_RELOAD_CLASS
1105 in_subreg_loc = inloc;
1106 #endif
1107 inloc = &SUBREG_REG (in);
1108 in = *inloc;
1110 if (!WORD_REGISTER_OPERATIONS
1111 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1112 && MEM_P (in))
1113 /* This is supposed to happen only for paradoxical subregs made by
1114 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1115 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1117 inmode = GET_MODE (in);
1120 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1121 where M1 is not valid for R if it was not handled by the code above.
1123 Similar issue for (SUBREG constant ...) if it was not handled by the
1124 code above. This can happen if SUBREG_BYTE != 0.
1126 However, we must reload the inner reg *as well as* the subreg in
1127 that case. */
1129 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1131 if (REG_P (SUBREG_REG (in)))
1132 subreg_in_class
1133 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1134 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1135 GET_MODE (SUBREG_REG (in)),
1136 SUBREG_BYTE (in),
1137 GET_MODE (in)),
1138 REGNO (SUBREG_REG (in)));
1139 else if (CONSTANT_P (SUBREG_REG (in))
1140 || GET_CODE (SUBREG_REG (in)) == PLUS)
1141 subreg_in_class = find_valid_class_1 (inmode,
1142 GET_MODE (SUBREG_REG (in)),
1143 rclass);
1145 /* This relies on the fact that emit_reload_insns outputs the
1146 instructions for input reloads of type RELOAD_OTHER in the same
1147 order as the reloads. Thus if the outer reload is also of type
1148 RELOAD_OTHER, we are guaranteed that this inner reload will be
1149 output before the outer reload. */
1150 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1151 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1152 dont_remove_subreg = 1;
1155 /* Similarly for paradoxical and problematical SUBREGs on the output.
1156 Note that there is no reason we need worry about the previous value
1157 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1158 entitled to clobber it all (except in the case of a word mode subreg
1159 or of a STRICT_LOW_PART, in that latter case the constraint should
1160 label it input-output.) */
1161 if (out != 0 && GET_CODE (out) == SUBREG
1162 && (subreg_lowpart_p (out) || strict_low)
1163 #ifdef CANNOT_CHANGE_MODE_CLASS
1164 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1165 #endif
1166 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1167 && (CONSTANT_P (SUBREG_REG (out))
1168 || strict_low
1169 || (((REG_P (SUBREG_REG (out))
1170 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1171 || MEM_P (SUBREG_REG (out)))
1172 && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1173 || (WORD_REGISTER_OPERATIONS
1174 && (GET_MODE_PRECISION (outmode)
1175 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1176 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1177 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1178 / UNITS_PER_WORD)))))
1179 || (REG_P (SUBREG_REG (out))
1180 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1181 /* The case of a word mode subreg
1182 is handled differently in the following statement. */
1183 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1184 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1185 > UNITS_PER_WORD))
1186 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1187 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1188 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1189 SUBREG_REG (out))
1190 == NO_REGS))
1191 #ifdef CANNOT_CHANGE_MODE_CLASS
1192 || (REG_P (SUBREG_REG (out))
1193 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1194 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1195 GET_MODE (SUBREG_REG (out)),
1196 outmode))
1197 #endif
1200 #ifdef LIMIT_RELOAD_CLASS
1201 out_subreg_loc = outloc;
1202 #endif
1203 outloc = &SUBREG_REG (out);
1204 out = *outloc;
1205 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1206 || GET_MODE_SIZE (GET_MODE (out))
1207 <= GET_MODE_SIZE (outmode));
1208 outmode = GET_MODE (out);
1211 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1212 where either M1 is not valid for R or M2 is wider than a word but we
1213 only need one register to store an M2-sized quantity in R.
1215 However, we must reload the inner reg *as well as* the subreg in
1216 that case and the inner reg is an in-out reload. */
1218 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1220 enum reg_class in_out_class
1221 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1222 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1223 GET_MODE (SUBREG_REG (out)),
1224 SUBREG_BYTE (out),
1225 GET_MODE (out)),
1226 REGNO (SUBREG_REG (out)));
1228 /* This relies on the fact that emit_reload_insns outputs the
1229 instructions for output reloads of type RELOAD_OTHER in reverse
1230 order of the reloads. Thus if the outer reload is also of type
1231 RELOAD_OTHER, we are guaranteed that this inner reload will be
1232 output after the outer reload. */
1233 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1234 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1235 0, 0, opnum, RELOAD_OTHER);
1236 dont_remove_subreg = 1;
1239 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1240 if (in != 0 && out != 0 && MEM_P (out)
1241 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1242 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1243 dont_share = 1;
1245 /* If IN is a SUBREG of a hard register, make a new REG. This
1246 simplifies some of the cases below. */
1248 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1249 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1250 && ! dont_remove_subreg)
1251 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1253 /* Similarly for OUT. */
1254 if (out != 0 && GET_CODE (out) == SUBREG
1255 && REG_P (SUBREG_REG (out))
1256 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1257 && ! dont_remove_subreg)
1258 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1260 /* Narrow down the class of register wanted if that is
1261 desirable on this machine for efficiency. */
1263 reg_class_t preferred_class = rclass;
1265 if (in != 0)
1266 preferred_class = targetm.preferred_reload_class (in, rclass);
1268 /* Output reloads may need analogous treatment, different in detail. */
1269 if (out != 0)
1270 preferred_class
1271 = targetm.preferred_output_reload_class (out, preferred_class);
1273 /* Discard what the target said if we cannot do it. */
1274 if (preferred_class != NO_REGS
1275 || (optional && type == RELOAD_FOR_OUTPUT))
1276 rclass = (enum reg_class) preferred_class;
1279 /* Make sure we use a class that can handle the actual pseudo
1280 inside any subreg. For example, on the 386, QImode regs
1281 can appear within SImode subregs. Although GENERAL_REGS
1282 can handle SImode, QImode needs a smaller class. */
1283 #ifdef LIMIT_RELOAD_CLASS
1284 if (in_subreg_loc)
1285 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1286 else if (in != 0 && GET_CODE (in) == SUBREG)
1287 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1289 if (out_subreg_loc)
1290 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1291 if (out != 0 && GET_CODE (out) == SUBREG)
1292 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1293 #endif
1295 /* Verify that this class is at least possible for the mode that
1296 is specified. */
1297 if (this_insn_is_asm)
1299 machine_mode mode;
1300 if (paradoxical_subreg_p (inmode, outmode))
1301 mode = inmode;
1302 else
1303 mode = outmode;
1304 if (mode == VOIDmode)
1306 error_for_asm (this_insn, "cannot reload integer constant "
1307 "operand in %<asm%>");
1308 mode = word_mode;
1309 if (in != 0)
1310 inmode = word_mode;
1311 if (out != 0)
1312 outmode = word_mode;
1314 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1315 if (HARD_REGNO_MODE_OK (i, mode)
1316 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1317 break;
1318 if (i == FIRST_PSEUDO_REGISTER)
1320 error_for_asm (this_insn, "impossible register constraint "
1321 "in %<asm%>");
1322 /* Avoid further trouble with this insn. */
1323 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1324 /* We used to continue here setting class to ALL_REGS, but it triggers
1325 sanity check on i386 for:
1326 void foo(long double d)
1328 asm("" :: "a" (d));
1330 Returning zero here ought to be safe as we take care in
1331 find_reloads to not process the reloads when instruction was
1332 replaced by USE. */
1334 return 0;
1338 /* Optional output reloads are always OK even if we have no register class,
1339 since the function of these reloads is only to have spill_reg_store etc.
1340 set, so that the storing insn can be deleted later. */
1341 gcc_assert (rclass != NO_REGS
1342 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1344 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1346 if (i == n_reloads)
1348 /* See if we need a secondary reload register to move between CLASS
1349 and IN or CLASS and OUT. Get the icode and push any required reloads
1350 needed for each of them if so. */
1352 if (in != 0)
1353 secondary_in_reload
1354 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1355 &secondary_in_icode, NULL);
1356 if (out != 0 && GET_CODE (out) != SCRATCH)
1357 secondary_out_reload
1358 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1359 type, &secondary_out_icode, NULL);
1361 /* We found no existing reload suitable for re-use.
1362 So add an additional reload. */
1364 #ifdef SECONDARY_MEMORY_NEEDED
1365 if (subreg_in_class == NO_REGS
1366 && in != 0
1367 && (REG_P (in)
1368 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1369 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1370 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1371 /* If a memory location is needed for the copy, make one. */
1372 if (subreg_in_class != NO_REGS
1373 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1374 get_secondary_mem (in, inmode, opnum, type);
1375 #endif
1377 i = n_reloads;
1378 rld[i].in = in;
1379 rld[i].out = out;
1380 rld[i].rclass = rclass;
1381 rld[i].inmode = inmode;
1382 rld[i].outmode = outmode;
1383 rld[i].reg_rtx = 0;
1384 rld[i].optional = optional;
1385 rld[i].inc = 0;
1386 rld[i].nocombine = 0;
1387 rld[i].in_reg = inloc ? *inloc : 0;
1388 rld[i].out_reg = outloc ? *outloc : 0;
1389 rld[i].opnum = opnum;
1390 rld[i].when_needed = type;
1391 rld[i].secondary_in_reload = secondary_in_reload;
1392 rld[i].secondary_out_reload = secondary_out_reload;
1393 rld[i].secondary_in_icode = secondary_in_icode;
1394 rld[i].secondary_out_icode = secondary_out_icode;
1395 rld[i].secondary_p = 0;
1397 n_reloads++;
1399 #ifdef SECONDARY_MEMORY_NEEDED
1400 if (out != 0
1401 && (REG_P (out)
1402 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1403 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1404 && SECONDARY_MEMORY_NEEDED (rclass,
1405 REGNO_REG_CLASS (reg_or_subregno (out)),
1406 outmode))
1407 get_secondary_mem (out, outmode, opnum, type);
1408 #endif
1410 else
1412 /* We are reusing an existing reload,
1413 but we may have additional information for it.
1414 For example, we may now have both IN and OUT
1415 while the old one may have just one of them. */
1417 /* The modes can be different. If they are, we want to reload in
1418 the larger mode, so that the value is valid for both modes. */
1419 if (inmode != VOIDmode
1420 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1421 rld[i].inmode = inmode;
1422 if (outmode != VOIDmode
1423 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1424 rld[i].outmode = outmode;
1425 if (in != 0)
1427 rtx in_reg = inloc ? *inloc : 0;
1428 /* If we merge reloads for two distinct rtl expressions that
1429 are identical in content, there might be duplicate address
1430 reloads. Remove the extra set now, so that if we later find
1431 that we can inherit this reload, we can get rid of the
1432 address reloads altogether.
1434 Do not do this if both reloads are optional since the result
1435 would be an optional reload which could potentially leave
1436 unresolved address replacements.
1438 It is not sufficient to call transfer_replacements since
1439 choose_reload_regs will remove the replacements for address
1440 reloads of inherited reloads which results in the same
1441 problem. */
1442 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1443 && ! (rld[i].optional && optional))
1445 /* We must keep the address reload with the lower operand
1446 number alive. */
1447 if (opnum > rld[i].opnum)
1449 remove_address_replacements (in);
1450 in = rld[i].in;
1451 in_reg = rld[i].in_reg;
1453 else
1454 remove_address_replacements (rld[i].in);
1456 /* When emitting reloads we don't necessarily look at the in-
1457 and outmode, but also directly at the operands (in and out).
1458 So we can't simply overwrite them with whatever we have found
1459 for this (to-be-merged) reload, we have to "merge" that too.
1460 Reusing another reload already verified that we deal with the
1461 same operands, just possibly in different modes. So we
1462 overwrite the operands only when the new mode is larger.
1463 See also PR33613. */
1464 if (!rld[i].in
1465 || GET_MODE_SIZE (GET_MODE (in))
1466 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1467 rld[i].in = in;
1468 if (!rld[i].in_reg
1469 || (in_reg
1470 && GET_MODE_SIZE (GET_MODE (in_reg))
1471 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1472 rld[i].in_reg = in_reg;
1474 if (out != 0)
1476 if (!rld[i].out
1477 || (out
1478 && GET_MODE_SIZE (GET_MODE (out))
1479 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1480 rld[i].out = out;
1481 if (outloc
1482 && (!rld[i].out_reg
1483 || GET_MODE_SIZE (GET_MODE (*outloc))
1484 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1485 rld[i].out_reg = *outloc;
1487 if (reg_class_subset_p (rclass, rld[i].rclass))
1488 rld[i].rclass = rclass;
1489 rld[i].optional &= optional;
1490 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1491 opnum, rld[i].opnum))
1492 rld[i].when_needed = RELOAD_OTHER;
1493 rld[i].opnum = MIN (rld[i].opnum, opnum);
1496 /* If the ostensible rtx being reloaded differs from the rtx found
1497 in the location to substitute, this reload is not safe to combine
1498 because we cannot reliably tell whether it appears in the insn. */
1500 if (in != 0 && in != *inloc)
1501 rld[i].nocombine = 1;
1503 #if 0
1504 /* This was replaced by changes in find_reloads_address_1 and the new
1505 function inc_for_reload, which go with a new meaning of reload_inc. */
1507 /* If this is an IN/OUT reload in an insn that sets the CC,
1508 it must be for an autoincrement. It doesn't work to store
1509 the incremented value after the insn because that would clobber the CC.
1510 So we must do the increment of the value reloaded from,
1511 increment it, store it back, then decrement again. */
1512 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1514 out = 0;
1515 rld[i].out = 0;
1516 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1517 /* If we did not find a nonzero amount-to-increment-by,
1518 that contradicts the belief that IN is being incremented
1519 in an address in this insn. */
1520 gcc_assert (rld[i].inc != 0);
1522 #endif
1524 /* If we will replace IN and OUT with the reload-reg,
1525 record where they are located so that substitution need
1526 not do a tree walk. */
1528 if (replace_reloads)
1530 if (inloc != 0)
1532 struct replacement *r = &replacements[n_replacements++];
1533 r->what = i;
1534 r->where = inloc;
1535 r->mode = inmode;
1537 if (outloc != 0 && outloc != inloc)
1539 struct replacement *r = &replacements[n_replacements++];
1540 r->what = i;
1541 r->where = outloc;
1542 r->mode = outmode;
1546 /* If this reload is just being introduced and it has both
1547 an incoming quantity and an outgoing quantity that are
1548 supposed to be made to match, see if either one of the two
1549 can serve as the place to reload into.
1551 If one of them is acceptable, set rld[i].reg_rtx
1552 to that one. */
1554 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1556 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1557 inmode, outmode,
1558 rld[i].rclass, i,
1559 earlyclobber_operand_p (out));
1561 /* If the outgoing register already contains the same value
1562 as the incoming one, we can dispense with loading it.
1563 The easiest way to tell the caller that is to give a phony
1564 value for the incoming operand (same as outgoing one). */
1565 if (rld[i].reg_rtx == out
1566 && (REG_P (in) || CONSTANT_P (in))
1567 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1568 static_reload_reg_p, i, inmode))
1569 rld[i].in = out;
1572 /* If this is an input reload and the operand contains a register that
1573 dies in this insn and is used nowhere else, see if it is the right class
1574 to be used for this reload. Use it if so. (This occurs most commonly
1575 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1576 this if it is also an output reload that mentions the register unless
1577 the output is a SUBREG that clobbers an entire register.
1579 Note that the operand might be one of the spill regs, if it is a
1580 pseudo reg and we are in a block where spilling has not taken place.
1581 But if there is no spilling in this block, that is OK.
1582 An explicitly used hard reg cannot be a spill reg. */
1584 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1586 rtx note;
1587 int regno;
1588 machine_mode rel_mode = inmode;
1590 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1591 rel_mode = outmode;
1593 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1594 if (REG_NOTE_KIND (note) == REG_DEAD
1595 && REG_P (XEXP (note, 0))
1596 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1597 && reg_mentioned_p (XEXP (note, 0), in)
1598 /* Check that a former pseudo is valid; see find_dummy_reload. */
1599 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1600 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1601 ORIGINAL_REGNO (XEXP (note, 0)))
1602 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1603 && ! refers_to_regno_for_reload_p (regno,
1604 end_hard_regno (rel_mode,
1605 regno),
1606 PATTERN (this_insn), inloc)
1607 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1608 /* If this is also an output reload, IN cannot be used as
1609 the reload register if it is set in this insn unless IN
1610 is also OUT. */
1611 && (out == 0 || in == out
1612 || ! hard_reg_set_here_p (regno,
1613 end_hard_regno (rel_mode, regno),
1614 PATTERN (this_insn)))
1615 /* ??? Why is this code so different from the previous?
1616 Is there any simple coherent way to describe the two together?
1617 What's going on here? */
1618 && (in != out
1619 || (GET_CODE (in) == SUBREG
1620 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1621 / UNITS_PER_WORD)
1622 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1623 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1624 /* Make sure the operand fits in the reg that dies. */
1625 && (GET_MODE_SIZE (rel_mode)
1626 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1627 && HARD_REGNO_MODE_OK (regno, inmode)
1628 && HARD_REGNO_MODE_OK (regno, outmode))
1630 unsigned int offs;
1631 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1632 hard_regno_nregs[regno][outmode]);
1634 for (offs = 0; offs < nregs; offs++)
1635 if (fixed_regs[regno + offs]
1636 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1637 regno + offs))
1638 break;
1640 if (offs == nregs
1641 && (! (refers_to_regno_for_reload_p
1642 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1643 || can_reload_into (in, regno, inmode)))
1645 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1646 break;
1651 if (out)
1652 output_reloadnum = i;
1654 return i;
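/* Illustrative sketch (the exact arguments are hypothetical): a typical
   caller in this file registers an input reload for a memory address
   roughly like

       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), (rtx *) 0,
                    base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x),
                                    MEM, SCRATCH),
                    GET_MODE (XEXP (x, 0)), VOIDmode, 0, 0, opnum, type);

   where OPNUM and TYPE describe the operand being processed.  The value
   returned indexes rld[], so the caller can inspect or adjust the reload
   that was pushed or merged.  */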
1657 /* Record an additional place we must replace a value
1658 for which we have already recorded a reload.
1659 RELOADNUM is the value returned by push_reload
1660 when the reload was recorded.
1661 This is used in insn patterns that use match_dup. */
1663 static void
1664 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1666 if (replace_reloads)
1668 struct replacement *r = &replacements[n_replacements++];
1669 r->what = reloadnum;
1670 r->where = loc;
1671 r->mode = mode;
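/* Illustrative example (the operand location is hypothetical): after

       push_replacement (&SET_SRC (PATTERN (this_insn)), r, SImode);

   subst_reloads can later write the reload register chosen for reload R,
   adjusted to SImode if necessary, straight into that location instead of
   walking the insn again.  */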
1675 /* Duplicate any replacement we have recorded to apply at
1676 location ORIG_LOC to also be performed at DUP_LOC.
1677 This is used in insn patterns that use match_dup. */
1679 static void
1680 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1682 int i, n = n_replacements;
1684 for (i = 0; i < n; i++)
1686 struct replacement *r = &replacements[i];
1687 if (r->where == orig_loc)
1688 push_replacement (dup_loc, r->what, r->mode);
1692 /* Transfer all replacements that used to be in reload FROM to be in
1693 reload TO. */
1695 void
1696 transfer_replacements (int to, int from)
1698 int i;
1700 for (i = 0; i < n_replacements; i++)
1701 if (replacements[i].what == from)
1702 replacements[i].what = to;
1705 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1706 or a subpart of it. If we have any replacements registered for IN_RTX,
1707 cancel the reloads that were supposed to load them.
1708 Return nonzero if we canceled any reloads. */
1709 int
1710 remove_address_replacements (rtx in_rtx)
1712 int i, j;
1713 char reload_flags[MAX_RELOADS];
1714 int something_changed = 0;
1716 memset (reload_flags, 0, sizeof reload_flags);
1717 for (i = 0, j = 0; i < n_replacements; i++)
1719 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1720 reload_flags[replacements[i].what] |= 1;
1721 else
1723 replacements[j++] = replacements[i];
1724 reload_flags[replacements[i].what] |= 2;
1727 /* Note that the following store must be done before the recursive calls. */
1728 n_replacements = j;
1730 for (i = n_reloads - 1; i >= 0; i--)
1732 if (reload_flags[i] == 1)
1734 deallocate_reload_reg (i);
1735 remove_address_replacements (rld[i].in);
1736 rld[i].in = 0;
1737 something_changed = 1;
1740 return something_changed;
1743 /* If there is only one output reload, and it is not for an earlyclobber
1744 operand, try to combine it with a (logically unrelated) input reload
1745 to reduce the number of reload registers needed.
1747 This is safe if the input reload does not appear in
1748 the value being output-reloaded, because this implies
1749 it is not needed any more once the original insn completes.
1751 If that doesn't work, see if we can use any of the registers that
1752 die in this insn as a reload register. We can if it is of the right
1753 class and does not appear in the value being output-reloaded. */
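/* Illustrative example (the pseudo-register numbers are made up): in

       (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4)))

   where neither pseudo received a hard register, the input reload that
   loads 71's stack slot and the output reload that stores the result into
   70's stack slot can share one reload register, because the loaded input
   is dead once the addition has been performed.  Sharing would be unsafe
   if the reloaded input also appeared inside the output expression, for
   instance in the address of a MEM destination.  */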
1755 static void
1756 combine_reloads (void)
1758 int i, regno;
1759 int output_reload = -1;
1760 int secondary_out = -1;
1761 rtx note;
1763 /* Find the output reload; return unless there is exactly one
1764 and that one is mandatory. */
1766 for (i = 0; i < n_reloads; i++)
1767 if (rld[i].out != 0)
1769 if (output_reload >= 0)
1770 return;
1771 output_reload = i;
1774 if (output_reload < 0 || rld[output_reload].optional)
1775 return;
1777 /* An input-output reload isn't combinable. */
1779 if (rld[output_reload].in != 0)
1780 return;
1782 /* If this reload is for an earlyclobber operand, we can't do anything. */
1783 if (earlyclobber_operand_p (rld[output_reload].out))
1784 return;
1786 /* If there is a reload for part of the address of this operand, we would
1787 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1788 its life to the point where doing this combine would not lower the
1789 number of spill registers needed. */
1790 for (i = 0; i < n_reloads; i++)
1791 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1792 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1793 && rld[i].opnum == rld[output_reload].opnum)
1794 return;
1796 /* Check each input reload; can we combine it? */
1798 for (i = 0; i < n_reloads; i++)
1799 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1800 /* Life span of this reload must not extend past main insn. */
1801 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1802 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1803 && rld[i].when_needed != RELOAD_OTHER
1804 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1805 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1806 [(int) rld[output_reload].outmode])
1807 && rld[i].inc == 0
1808 && rld[i].reg_rtx == 0
1809 #ifdef SECONDARY_MEMORY_NEEDED
1810 /* Don't combine two reloads with different secondary
1811 memory locations. */
1812 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1813 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1814 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1815 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1816 #endif
1817 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1818 ? (rld[i].rclass == rld[output_reload].rclass)
1819 : (reg_class_subset_p (rld[i].rclass,
1820 rld[output_reload].rclass)
1821 || reg_class_subset_p (rld[output_reload].rclass,
1822 rld[i].rclass)))
1823 && (MATCHES (rld[i].in, rld[output_reload].out)
1824 /* Args reversed because the first arg seems to be
1825 the one that we imagine being modified
1826 while the second is the one that might be affected. */
1827 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1828 rld[i].in)
1829 /* However, if the input is a register that appears inside
1830 the output, then we also can't share.
1831 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1832 If the same reload reg is used for both reg 69 and the
1833 result to be stored in memory, then that result
1834 will clobber the address of the memory ref. */
1835 && ! (REG_P (rld[i].in)
1836 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1837 rld[output_reload].out))))
1838 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1839 rld[i].when_needed != RELOAD_FOR_INPUT)
1840 && (reg_class_size[(int) rld[i].rclass]
1841 || targetm.small_register_classes_for_mode_p (VOIDmode))
1842 /* We will allow making things slightly worse by combining an
1843 input and an output, but no worse than that. */
1844 && (rld[i].when_needed == RELOAD_FOR_INPUT
1845 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1847 int j;
1849 /* We have found a reload to combine with! */
1850 rld[i].out = rld[output_reload].out;
1851 rld[i].out_reg = rld[output_reload].out_reg;
1852 rld[i].outmode = rld[output_reload].outmode;
1853 /* Mark the old output reload as inoperative. */
1854 rld[output_reload].out = 0;
1855 /* The combined reload is needed for the entire insn. */
1856 rld[i].when_needed = RELOAD_OTHER;
1857 /* If the output reload had a secondary reload, copy it. */
1858 if (rld[output_reload].secondary_out_reload != -1)
1860 rld[i].secondary_out_reload
1861 = rld[output_reload].secondary_out_reload;
1862 rld[i].secondary_out_icode
1863 = rld[output_reload].secondary_out_icode;
1866 #ifdef SECONDARY_MEMORY_NEEDED
1867 /* Copy any secondary MEM. */
1868 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1869 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1870 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1871 #endif
1872 /* If required, minimize the register class. */
1873 if (reg_class_subset_p (rld[output_reload].rclass,
1874 rld[i].rclass))
1875 rld[i].rclass = rld[output_reload].rclass;
1877 /* Transfer all replacements from the old reload to the combined. */
1878 for (j = 0; j < n_replacements; j++)
1879 if (replacements[j].what == output_reload)
1880 replacements[j].what = i;
1882 return;
1885 /* If this insn has only one operand that is modified or written (assumed
1886 to be the first), it must be the one corresponding to this reload. It
1887 is safe to use anything that dies in this insn for that output provided
1888 that it does not occur in the output (we already know it isn't an
1889 earlyclobber). If this is an asm insn, give up. */
1891 if (INSN_CODE (this_insn) == -1)
1892 return;
1894 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1895 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1896 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1897 return;
1899 /* See if some hard register that dies in this insn and is not used in
1900 the output is the right class. Only works if the register we pick
1901 up can fully hold our output reload. */
1902 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1903 if (REG_NOTE_KIND (note) == REG_DEAD
1904 && REG_P (XEXP (note, 0))
1905 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1906 rld[output_reload].out)
1907 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1908 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1909 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1910 regno)
1911 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1912 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1913 /* Ensure that a secondary or tertiary reload for this output
1914 won't want this register. */
1915 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1916 || (!(TEST_HARD_REG_BIT
1917 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1918 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1919 || !(TEST_HARD_REG_BIT
1920 (reg_class_contents[(int) rld[secondary_out].rclass],
1921 regno)))))
1922 && !fixed_regs[regno]
1923 /* Check that a former pseudo is valid; see find_dummy_reload. */
1924 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1925 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1926 ORIGINAL_REGNO (XEXP (note, 0)))
1927 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1929 rld[output_reload].reg_rtx
1930 = gen_rtx_REG (rld[output_reload].outmode, regno);
1931 return;
1935 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1936 See if one of IN and OUT is a register that may be used;
1937 this is desirable since a spill-register won't be needed.
1938 If so, return the register rtx that proves acceptable.
1940 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1941 RCLASS is the register class required for the reload.
1943 If FOR_REAL is >= 0, it is the number of the reload,
1944 and in some cases when it can be discovered that OUT doesn't need
1945 to be computed, clear out rld[FOR_REAL].out.
1947 If FOR_REAL is -1, this should not be done, because this call
1948 is just to see if a register can be found, not to find and install it.
1950 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1951 puts an additional constraint on being able to use IN for OUT since
1952 IN must not appear elsewhere in the insn (it is assumed that IN itself
1953 is safe from the earlyclobber). */
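/* Illustrative example (hard register 3 and pseudo 71 are made up):
   suppose a reload has IN = (reg:SI 71), a pseudo that lives on the
   stack, and OUT = (reg:SI 3), for an insn such as

       (set (reg:SI 3) (plus:SI (reg:SI 71) (const_int 4)))

   If hard register 3 belongs to RCLASS and is not referenced elsewhere in
   the insn, it can serve as the reload register itself, so no separate
   spill register is needed.  Failing that, a hard register IN that dies
   in the insn may be usable instead.  */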
1955 static rtx
1956 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1957 machine_mode inmode, machine_mode outmode,
1958 reg_class_t rclass, int for_real, int earlyclobber)
1960 rtx in = real_in;
1961 rtx out = real_out;
1962 int in_offset = 0;
1963 int out_offset = 0;
1964 rtx value = 0;
1966 /* If operands exceed a word, we can't use either of them
1967 unless they have the same size. */
1968 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1969 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1970 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1971 return 0;
1973 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1974 respectively refers to a hard register. */
1976 /* Find the inside of any subregs. */
1977 while (GET_CODE (out) == SUBREG)
1979 if (REG_P (SUBREG_REG (out))
1980 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1981 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1982 GET_MODE (SUBREG_REG (out)),
1983 SUBREG_BYTE (out),
1984 GET_MODE (out));
1985 out = SUBREG_REG (out);
1987 while (GET_CODE (in) == SUBREG)
1989 if (REG_P (SUBREG_REG (in))
1990 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1991 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1992 GET_MODE (SUBREG_REG (in)),
1993 SUBREG_BYTE (in),
1994 GET_MODE (in));
1995 in = SUBREG_REG (in);
1998 /* Narrow down the reg class, the same way push_reload will;
1999 otherwise we might find a dummy now, but push_reload won't. */
2001 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2002 if (preferred_class != NO_REGS)
2003 rclass = (enum reg_class) preferred_class;
2006 /* See if OUT will do. */
2007 if (REG_P (out)
2008 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2010 unsigned int regno = REGNO (out) + out_offset;
2011 unsigned int nwords = hard_regno_nregs[regno][outmode];
2012 rtx saved_rtx;
2014 /* When we consider whether the insn uses OUT,
2015 ignore references within IN. They don't prevent us
2016 from copying IN into OUT, because those refs would
2017 move into the insn that reloads IN.
2019 However, we only ignore IN in its role as this reload.
2020 If the insn uses IN elsewhere and it contains OUT,
2021 that counts. We can't be sure it's the "same" operand
2022 so it might not go through this reload.
2024 We also need to avoid using OUT if it, or part of it, is a
2025 fixed register. Modifying such registers, even transiently,
2026 may have undefined effects on the machine, such as modifying
2027 the stack pointer. */
2028 saved_rtx = *inloc;
2029 *inloc = const0_rtx;
2031 if (regno < FIRST_PSEUDO_REGISTER
2032 && HARD_REGNO_MODE_OK (regno, outmode)
2033 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2034 PATTERN (this_insn), outloc))
2036 unsigned int i;
2038 for (i = 0; i < nwords; i++)
2039 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2040 regno + i)
2041 || fixed_regs[regno + i])
2042 break;
2044 if (i == nwords)
2046 if (REG_P (real_out))
2047 value = real_out;
2048 else
2049 value = gen_rtx_REG (outmode, regno);
2053 *inloc = saved_rtx;
2056 /* Consider using IN if OUT was not acceptable
2057 or if OUT dies in this insn (like the quotient in a divmod insn).
2058 We can't use IN unless it dies in this insn,
2059 which means we must know accurately which hard regs are live.
2060 Also, the result can't go in IN if IN is used within OUT,
2061 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2062 if (hard_regs_live_known
2063 && REG_P (in)
2064 && REGNO (in) < FIRST_PSEUDO_REGISTER
2065 && (value == 0
2066 || find_reg_note (this_insn, REG_UNUSED, real_out))
2067 && find_reg_note (this_insn, REG_DEAD, real_in)
2068 && !fixed_regs[REGNO (in)]
2069 && HARD_REGNO_MODE_OK (REGNO (in),
2070 /* The only case where out and real_out might
2071 have different modes is where real_out
2072 is a subreg, and in that case, out
2073 has a real mode. */
2074 (GET_MODE (out) != VOIDmode
2075 ? GET_MODE (out) : outmode))
2076 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2077 /* However only do this if we can be sure that this input
2078 operand doesn't correspond with an uninitialized pseudo.
2079 global can assign some hardreg to it that is the same as
2080 the one assigned to a different, also live pseudo (as it
2081 can ignore the conflict). We must never introduce writes
2082 to such hardregs, as they would clobber the other live
2083 pseudo. See PR 20973. */
2084 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2085 ORIGINAL_REGNO (in))
2086 /* Similarly, only do this if we can be sure that the death
2087 note is still valid. global can assign some hardreg to
2088 the pseudo referenced in the note and simultaneously a
2089 subword of this hardreg to a different, also live pseudo,
2090 because only another subword of the hardreg is actually
2091 used in the insn. This cannot happen if the pseudo has
2092 been assigned exactly one hardreg. See PR 33732. */
2093 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2095 unsigned int regno = REGNO (in) + in_offset;
2096 unsigned int nwords = hard_regno_nregs[regno][inmode];
2098 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2099 && ! hard_reg_set_here_p (regno, regno + nwords,
2100 PATTERN (this_insn))
2101 && (! earlyclobber
2102 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2103 PATTERN (this_insn), inloc)))
2105 unsigned int i;
2107 for (i = 0; i < nwords; i++)
2108 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2109 regno + i))
2110 break;
2112 if (i == nwords)
2114 /* If we were going to use OUT as the reload reg
2115 and changed our mind, it means OUT is a dummy that
2116 dies here. So don't bother copying value to it. */
2117 if (for_real >= 0 && value == real_out)
2118 rld[for_real].out = 0;
2119 if (REG_P (real_in))
2120 value = real_in;
2121 else
2122 value = gen_rtx_REG (inmode, regno);
2127 return value;
2130 /* This page contains subroutines used mainly for determining
2131 whether the IN or an OUT of a reload can serve as the
2132 reload register. */
2134 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2136 int
2137 earlyclobber_operand_p (rtx x)
2139 int i;
2141 for (i = 0; i < n_earlyclobbers; i++)
2142 if (reload_earlyclobbers[i] == x)
2143 return 1;
2145 return 0;
2148 /* Return 1 if expression X alters a hard reg in the range
2149 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2150 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2151 X should be the body of an instruction. */
2153 static int
2154 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2156 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2158 rtx op0 = SET_DEST (x);
2160 while (GET_CODE (op0) == SUBREG)
2161 op0 = SUBREG_REG (op0);
2162 if (REG_P (op0))
2164 unsigned int r = REGNO (op0);
2166 /* See if this reg overlaps range under consideration. */
2167 if (r < end_regno
2168 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2169 return 1;
2172 else if (GET_CODE (x) == PARALLEL)
2174 int i = XVECLEN (x, 0) - 1;
2176 for (; i >= 0; i--)
2177 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2178 return 1;
2181 return 0;
2184 /* Return 1 if ADDR is a valid memory address for mode MODE
2185 in address space AS, and check that each pseudo reg has the
2186 proper kind of hard reg. */
2188 int
2189 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2190 rtx addr, addr_space_t as)
2192 #ifdef GO_IF_LEGITIMATE_ADDRESS
2193 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2194 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2195 return 0;
2197 win:
2198 return 1;
2199 #else
2200 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2201 #endif
2204 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2205 if they are the same hard reg, and has special hacks for
2206 autoincrement and autodecrement.
2207 This is specifically intended for find_reloads to use
2208 in determining whether two operands match.
2209 X is the operand whose number is the lower of the two.
2211 The value is 2 if Y contains a pre-increment that matches
2212 a non-incrementing address in X. */
2214 /* ??? To be completely correct, we should arrange to pass
2215 for X the output operand and for Y the input operand.
2216 For now, we assume that the output operand has the lower number
2217 because that is natural in (SET output (... input ...)). */
2219 int
2220 operands_match_p (rtx x, rtx y)
2222 int i;
2223 RTX_CODE code = GET_CODE (x);
2224 const char *fmt;
2225 int success_2;
2227 if (x == y)
2228 return 1;
2229 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2230 && (REG_P (y) || (GET_CODE (y) == SUBREG
2231 && REG_P (SUBREG_REG (y)))))
2233 int j;
2235 if (code == SUBREG)
2237 i = REGNO (SUBREG_REG (x));
2238 if (i >= FIRST_PSEUDO_REGISTER)
2239 goto slow;
2240 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2241 GET_MODE (SUBREG_REG (x)),
2242 SUBREG_BYTE (x),
2243 GET_MODE (x));
2245 else
2246 i = REGNO (x);
2248 if (GET_CODE (y) == SUBREG)
2250 j = REGNO (SUBREG_REG (y));
2251 if (j >= FIRST_PSEUDO_REGISTER)
2252 goto slow;
2253 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2254 GET_MODE (SUBREG_REG (y)),
2255 SUBREG_BYTE (y),
2256 GET_MODE (y));
2258 else
2259 j = REGNO (y);
2261 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2262 multiple hard register group of scalar integer registers, so that
2263 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2264 register. */
2265 scalar_int_mode xmode;
2266 if (REG_WORDS_BIG_ENDIAN
2267 && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2268 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2269 && i < FIRST_PSEUDO_REGISTER)
2270 i += hard_regno_nregs[i][xmode] - 1;
2271 scalar_int_mode ymode;
2272 if (REG_WORDS_BIG_ENDIAN
2273 && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2274 && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2275 && j < FIRST_PSEUDO_REGISTER)
2276 j += hard_regno_nregs[j][ymode] - 1;
2278 return i == j;
2280 /* If two operands must match, because they are really a single
2281 operand of an assembler insn, then two postincrements are invalid
2282 because the assembler insn would increment only once.
2283 On the other hand, a postincrement matches ordinary indexing
2284 if the postincrement is the output operand. */
2285 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2286 return operands_match_p (XEXP (x, 0), y);
2287 /* Two preincrements are invalid
2288 because the assembler insn would increment only once.
2289 On the other hand, a preincrement matches ordinary indexing
2290 if the preincrement is the input operand.
2291 In this case, return 2, since some callers need to do special
2292 things when this happens. */
2293 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2294 || GET_CODE (y) == PRE_MODIFY)
2295 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2297 slow:
2299 /* Now we have disposed of all the cases in which different rtx codes
2300 can match. */
2301 if (code != GET_CODE (y))
2302 return 0;
2304 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2305 if (GET_MODE (x) != GET_MODE (y))
2306 return 0;
2308 /* MEMs referring to different address space are not equivalent. */
2309 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2310 return 0;
2312 switch (code)
2314 CASE_CONST_UNIQUE:
2315 return 0;
2317 case LABEL_REF:
2318 return label_ref_label (x) == label_ref_label (y);
2319 case SYMBOL_REF:
2320 return XSTR (x, 0) == XSTR (y, 0);
2322 default:
2323 break;
2326 /* Compare the elements. If any pair of corresponding elements
2327 fails to match, return 0 for the whole thing. */
2329 success_2 = 0;
2330 fmt = GET_RTX_FORMAT (code);
2331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2333 int val, j;
2334 switch (fmt[i])
2336 case 'w':
2337 if (XWINT (x, i) != XWINT (y, i))
2338 return 0;
2339 break;
2341 case 'i':
2342 if (XINT (x, i) != XINT (y, i))
2343 return 0;
2344 break;
2346 case 'e':
2347 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2348 if (val == 0)
2349 return 0;
2350 /* If any subexpression returns 2,
2351 we should return 2 if we are successful. */
2352 if (val == 2)
2353 success_2 = 1;
2354 break;
2356 case '0':
2357 break;
2359 case 'E':
2360 if (XVECLEN (x, i) != XVECLEN (y, i))
2361 return 0;
2362 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2364 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2365 if (val == 0)
2366 return 0;
2367 if (val == 2)
2368 success_2 = 1;
2370 break;
2372 /* It is believed that rtx's at this level will never
2373 contain anything but integers and other rtx's,
2374 except for within LABEL_REFs and SYMBOL_REFs. */
2375 default:
2376 gcc_unreachable ();
2379 return 1 + success_2;
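#if 0
/* Illustrative usage sketch, not part of the original sources; hard
   register number 1 is arbitrary.  */
static void
operands_match_example (void)
{
  rtx r1 = gen_rtx_REG (SImode, 1);
  rtx plain = gen_rtx_MEM (SImode, r1);
  rtx preinc = gen_rtx_MEM (SImode, gen_rtx_PRE_INC (Pmode, r1));

  /* Identical hard registers match.  */
  gcc_assert (operands_match_p (r1, gen_rtx_REG (SImode, 1)) == 1);

  /* A pre-increment in the second (input) operand matches ordinary
     indexing in the first and reports it by returning 2.  */
  gcc_assert (operands_match_p (plain, preinc) == 2);
}
#endif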
2382 /* Describe the range of registers or memory referenced by X.
2383 If X is a register, set REG_FLAG and put the first register
2384 number into START and the last plus one into END.
2385 If X is a memory reference, put a base address into BASE
2386 and a range of integer offsets into START and END.
2387 If X is pushing on the stack, we can assume it causes no trouble,
2388 so we set the SAFE field. */
2390 static struct decomposition
2391 decompose (rtx x)
2393 struct decomposition val;
2394 int all_const = 0;
2396 memset (&val, 0, sizeof (val));
2398 switch (GET_CODE (x))
2400 case MEM:
2402 rtx base = NULL_RTX, offset = 0;
2403 rtx addr = XEXP (x, 0);
2405 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2406 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2408 val.base = XEXP (addr, 0);
2409 val.start = -GET_MODE_SIZE (GET_MODE (x));
2410 val.end = GET_MODE_SIZE (GET_MODE (x));
2411 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2412 return val;
2415 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2417 if (GET_CODE (XEXP (addr, 1)) == PLUS
2418 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2419 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2421 val.base = XEXP (addr, 0);
2422 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2423 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2424 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2425 return val;
2429 if (GET_CODE (addr) == CONST)
2431 addr = XEXP (addr, 0);
2432 all_const = 1;
2434 if (GET_CODE (addr) == PLUS)
2436 if (CONSTANT_P (XEXP (addr, 0)))
2438 base = XEXP (addr, 1);
2439 offset = XEXP (addr, 0);
2441 else if (CONSTANT_P (XEXP (addr, 1)))
2443 base = XEXP (addr, 0);
2444 offset = XEXP (addr, 1);
2448 if (offset == 0)
2450 base = addr;
2451 offset = const0_rtx;
2453 if (GET_CODE (offset) == CONST)
2454 offset = XEXP (offset, 0);
2455 if (GET_CODE (offset) == PLUS)
2457 if (CONST_INT_P (XEXP (offset, 0)))
2459 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2460 offset = XEXP (offset, 0);
2462 else if (CONST_INT_P (XEXP (offset, 1)))
2464 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2465 offset = XEXP (offset, 1);
2467 else
2469 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2470 offset = const0_rtx;
2473 else if (!CONST_INT_P (offset))
2475 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2476 offset = const0_rtx;
2479 if (all_const && GET_CODE (base) == PLUS)
2480 base = gen_rtx_CONST (GET_MODE (base), base);
2482 gcc_assert (CONST_INT_P (offset));
2484 val.start = INTVAL (offset);
2485 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2486 val.base = base;
2488 break;
2490 case REG:
2491 val.reg_flag = 1;
2492 val.start = true_regnum (x);
2493 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2495 /* A pseudo with no hard reg. */
2496 val.start = REGNO (x);
2497 val.end = val.start + 1;
2499 else
2500 /* A hard reg. */
2501 val.end = end_hard_regno (GET_MODE (x), val.start);
2502 break;
2504 case SUBREG:
2505 if (!REG_P (SUBREG_REG (x)))
2506 /* This could be more precise, but it's good enough. */
2507 return decompose (SUBREG_REG (x));
2508 val.reg_flag = 1;
2509 val.start = true_regnum (x);
2510 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2511 return decompose (SUBREG_REG (x));
2512 else
2513 /* A hard reg. */
2514 val.end = val.start + subreg_nregs (x);
2515 break;
2517 case SCRATCH:
2518 /* This hasn't been assigned yet, so it can't conflict yet. */
2519 val.safe = 1;
2520 break;
2522 default:
2523 gcc_assert (CONSTANT_P (x));
2524 val.safe = 1;
2525 break;
2527 return val;
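#if 0
/* Illustrative usage sketch, not part of the original sources; hard
   register 3 and the offset 8 are arbitrary.  */
static void
decompose_example (void)
{
  /* A stack slot: (mem:SI (plus (reg fp) (const_int 8))).  */
  rtx slot = gen_rtx_MEM (SImode,
                          gen_rtx_PLUS (Pmode, frame_pointer_rtx,
                                        GEN_INT (8)));
  struct decomposition d = decompose (slot);
  /* d.base == frame_pointer_rtx, d.start == 8,
     d.end == 8 + GET_MODE_SIZE (SImode), d.reg_flag == 0.  */

  /* A hard register: (reg:SI 3).  */
  struct decomposition r = decompose (gen_rtx_REG (SImode, 3));
  /* r.reg_flag == 1, r.start == 3, r.end == end_hard_regno (SImode, 3).  */
}
#endif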
2530 /* Return 1 if altering Y will not modify the value of X.
2531 Y is also described by YDATA, which should be decompose (Y). */
2533 static int
2534 immune_p (rtx x, rtx y, struct decomposition ydata)
2536 struct decomposition xdata;
2538 if (ydata.reg_flag)
2539 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2540 if (ydata.safe)
2541 return 1;
2543 gcc_assert (MEM_P (y));
2544 /* If Y is memory and X is not, Y can't affect X. */
2545 if (!MEM_P (x))
2546 return 1;
2548 xdata = decompose (x);
2550 if (! rtx_equal_p (xdata.base, ydata.base))
2552 /* If bases are distinct symbolic constants, there is no overlap. */
2553 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2554 return 1;
2555 /* Constants and stack slots never overlap. */
2556 if (CONSTANT_P (xdata.base)
2557 && (ydata.base == frame_pointer_rtx
2558 || ydata.base == hard_frame_pointer_rtx
2559 || ydata.base == stack_pointer_rtx))
2560 return 1;
2561 if (CONSTANT_P (ydata.base)
2562 && (xdata.base == frame_pointer_rtx
2563 || xdata.base == hard_frame_pointer_rtx
2564 || xdata.base == stack_pointer_rtx))
2565 return 1;
2566 /* If either base is variable, we don't know anything. */
2567 return 0;
2570 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
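/* Illustrative example (the offsets are arbitrary): two 4-byte frame
   slots at (plus (reg fp) (const_int 8)) and (plus (reg fp) (const_int 12))
   decompose to the ranges [8, 12) and [12, 16) with the same base, so each
   is immune to a store into the other.  When the bases differ, overlap is
   ruled out only for distinct constants or for a constant versus a frame
   or stack pointer; otherwise immune_p conservatively returns 0.  */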
2573 /* Similar to immune_p, but decompose CLOBBER here rather than in the caller. */
2575 int
2576 safe_from_earlyclobber (rtx op, rtx clobber)
2578 struct decomposition early_data;
2580 early_data = decompose (clobber);
2581 return immune_p (op, clobber, early_data);
2584 /* Main entry point of this file: search the body of INSN
2585 for values that need reloading and record them with push_reload.
2586 REPLACE nonzero means record also where the values occur
2587 so that subst_reloads can be used.
2589 IND_LEVELS says how many levels of indirection are supported by this
2590 machine; a value of zero means that a memory reference is not a valid
2591 memory address.
2593 LIVE_KNOWN says we have valid information about which hard
2594 regs are live at each point in the program; this is true when
2595 we are called from global_alloc but false when stupid register
2596 allocation has been done.
2598 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2599 which is nonnegative if the reg has been commandeered for reloading into.
2600 It is copied into STATIC_RELOAD_REG_P and referenced from there
2601 by various subroutines.
2603 Return TRUE if some operands need to be changed, because of swapping
2604 commutative operands, reg_equiv_address substitution, or whatever. */
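/* Illustrative note: reload1.c typically calls this first with REPLACE == 0
   merely to count and classify the reloads an insn needs, and again with
   REPLACE == 1 when the reload insns are actually emitted, so that
   subst_reloads can patch the recorded locations.  */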
2606 int
2607 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2608 short *reload_reg_p)
2610 int insn_code_number;
2611 int i, j;
2612 int noperands;
2613 /* These start out as the constraints for the insn
2614 and they are chewed up as we consider alternatives. */
2615 const char *constraints[MAX_RECOG_OPERANDS];
2616 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2617 a register. */
2618 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2619 char pref_or_nothing[MAX_RECOG_OPERANDS];
2620 /* Nonzero for a MEM operand whose entire address needs a reload.
2621 May be -1 to indicate the entire address may or may not need a reload. */
2622 int address_reloaded[MAX_RECOG_OPERANDS];
2623 /* Nonzero for an address operand that needs to be completely reloaded.
2624 May be -1 to indicate the entire operand may or may not need a reload. */
2625 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2626 /* Value of enum reload_type to use for operand. */
2627 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2628 /* Value of enum reload_type to use within address of operand. */
2629 enum reload_type address_type[MAX_RECOG_OPERANDS];
2630 /* Save the usage of each operand. */
2631 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2632 int no_input_reloads = 0, no_output_reloads = 0;
2633 int n_alternatives;
2634 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2635 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2636 char this_alternative_win[MAX_RECOG_OPERANDS];
2637 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2638 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2639 int this_alternative_matches[MAX_RECOG_OPERANDS];
2640 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2641 int this_alternative_number;
2642 int goal_alternative_number = 0;
2643 int operand_reloadnum[MAX_RECOG_OPERANDS];
2644 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2645 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2646 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2647 char goal_alternative_win[MAX_RECOG_OPERANDS];
2648 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2649 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2650 int goal_alternative_swapped;
2651 int best;
2652 int commutative;
2653 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2654 rtx substed_operand[MAX_RECOG_OPERANDS];
2655 rtx body = PATTERN (insn);
2656 rtx set = single_set (insn);
2657 int goal_earlyclobber = 0, this_earlyclobber;
2658 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2659 int retval = 0;
2661 this_insn = insn;
2662 n_reloads = 0;
2663 n_replacements = 0;
2664 n_earlyclobbers = 0;
2665 replace_reloads = replace;
2666 hard_regs_live_known = live_known;
2667 static_reload_reg_p = reload_reg_p;
2669 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2670 neither are insns that SET cc0. Insns that use CC0 are not allowed
2671 to have any input reloads. */
2672 if (JUMP_P (insn) || CALL_P (insn))
2673 no_output_reloads = 1;
2675 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2676 no_input_reloads = 1;
2677 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2678 no_output_reloads = 1;
2680 #ifdef SECONDARY_MEMORY_NEEDED
2681 /* The eliminated forms of any secondary memory locations are per-insn, so
2682 clear them out here. */
2684 if (secondary_memlocs_elim_used)
2686 memset (secondary_memlocs_elim, 0,
2687 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2688 secondary_memlocs_elim_used = 0;
2690 #endif
2692 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2693 is cheap to move between them. If it is not, there may not be an insn
2694 to do the copy, so we may need a reload. */
2695 if (GET_CODE (body) == SET
2696 && REG_P (SET_DEST (body))
2697 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2698 && REG_P (SET_SRC (body))
2699 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2700 && register_move_cost (GET_MODE (SET_SRC (body)),
2701 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2702 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2703 return 0;
2705 extract_insn (insn);
2707 noperands = reload_n_operands = recog_data.n_operands;
2708 n_alternatives = recog_data.n_alternatives;
2710 /* Just return "no reloads" if insn has no operands with constraints. */
2711 if (noperands == 0 || n_alternatives == 0)
2712 return 0;
2714 insn_code_number = INSN_CODE (insn);
2715 this_insn_is_asm = insn_code_number < 0;
2717 memcpy (operand_mode, recog_data.operand_mode,
2718 noperands * sizeof (machine_mode));
2719 memcpy (constraints, recog_data.constraints,
2720 noperands * sizeof (const char *));
2722 commutative = -1;
2724 /* If we will need to know, later, whether some pair of operands
2725 are the same, we must compare them now and save the result.
2726 Reloading the base and index registers will clobber them
2727 and afterward they will fail to match. */
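/* For example (hypothetical pseudo number): two operands that are both
   (mem:SI (reg:SI 65)) and must satisfy a matching constraint compare
   equal here; once their base register has been replaced by reload
   registers, operands_match_p could no longer prove it.  */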
2729 for (i = 0; i < noperands; i++)
2731 const char *p;
2732 int c;
2733 char *end;
2735 substed_operand[i] = recog_data.operand[i];
2736 p = constraints[i];
2738 modified[i] = RELOAD_READ;
2740 /* Scan this operand's constraint to see if it is an output operand,
2741 an in-out operand, is commutative, or should match another. */
2743 while ((c = *p))
2745 p += CONSTRAINT_LEN (c, p);
2746 switch (c)
2748 case '=':
2749 modified[i] = RELOAD_WRITE;
2750 break;
2751 case '+':
2752 modified[i] = RELOAD_READ_WRITE;
2753 break;
2754 case '%':
2756 /* The last operand should not be marked commutative. */
2757 gcc_assert (i != noperands - 1);
2759 /* We currently only support one commutative pair of
2760 operands. Some existing asm code currently uses more
2761 than one pair. Previously, that would usually work,
2762 but sometimes it would crash the compiler. We
2763 continue supporting that case as well as we can by
2764 silently ignoring all but the first pair. In the
2765 future we may handle it correctly. */
2766 if (commutative < 0)
2767 commutative = i;
2768 else
2769 gcc_assert (this_insn_is_asm);
2771 break;
2772 /* Use of ISDIGIT is tempting here, but it may get expensive because
2773 of locale support we don't want. */
2774 case '0': case '1': case '2': case '3': case '4':
2775 case '5': case '6': case '7': case '8': case '9':
2777 c = strtoul (p - 1, &end, 10);
2778 p = end;
2780 operands_match[c][i]
2781 = operands_match_p (recog_data.operand[c],
2782 recog_data.operand[i]);
2784 /* An operand may not match itself. */
2785 gcc_assert (c != i);
2787 /* If C can be commuted with C+1, and C might need to match I,
2788 then C+1 might also need to match I. */
2789 if (commutative >= 0)
2791 if (c == commutative || c == commutative + 1)
2793 int other = c + (c == commutative ? 1 : -1);
2794 operands_match[other][i]
2795 = operands_match_p (recog_data.operand[other],
2796 recog_data.operand[i]);
2798 if (i == commutative || i == commutative + 1)
2800 int other = i + (i == commutative ? 1 : -1);
2801 operands_match[c][other]
2802 = operands_match_p (recog_data.operand[c],
2803 recog_data.operand[other]);
2805 /* Note that C is supposed to be less than I.
2806 No need to consider altering both C and I because in
2807 that case we would alter one into the other. */
2814 /* Examine each operand that is a memory reference or memory address
2815 and reload parts of the addresses into index registers.
2816 Also here any references to pseudo regs that didn't get hard regs
2817 but are equivalent to constants get replaced in the insn itself
2818 with those constants. Nobody will ever see them again.
2820 Finally, set up the preferred classes of each operand. */
2822 for (i = 0; i < noperands; i++)
2824 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2826 address_reloaded[i] = 0;
2827 address_operand_reloaded[i] = 0;
2828 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2829 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2830 : RELOAD_OTHER);
2831 address_type[i]
2832 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2833 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2834 : RELOAD_OTHER);
2836 if (*constraints[i] == 0)
2837 /* Ignore things like match_operator operands. */
2839 else if (insn_extra_address_constraint
2840 (lookup_constraint (constraints[i])))
2842 address_operand_reloaded[i]
2843 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2844 recog_data.operand[i],
2845 recog_data.operand_loc[i],
2846 i, operand_type[i], ind_levels, insn);
2848 /* If we now have a simple operand where we used to have a
2849 PLUS or MULT, re-recognize and try again. */
2850 if ((OBJECT_P (*recog_data.operand_loc[i])
2851 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2852 && (GET_CODE (recog_data.operand[i]) == MULT
2853 || GET_CODE (recog_data.operand[i]) == PLUS))
2855 INSN_CODE (insn) = -1;
2856 retval = find_reloads (insn, replace, ind_levels, live_known,
2857 reload_reg_p);
2858 return retval;
2861 recog_data.operand[i] = *recog_data.operand_loc[i];
2862 substed_operand[i] = recog_data.operand[i];
2864 /* Address operands are reloaded in their existing mode,
2865 no matter what is specified in the machine description. */
2866 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2868 /* If the address is a single CONST_INT, pick the address mode
2869 instead; otherwise we will later not know in which mode
2870 the reload should be performed. */
2871 if (operand_mode[i] == VOIDmode)
2872 operand_mode[i] = Pmode;
2875 else if (code == MEM)
2877 address_reloaded[i]
2878 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2879 recog_data.operand_loc[i],
2880 XEXP (recog_data.operand[i], 0),
2881 &XEXP (recog_data.operand[i], 0),
2882 i, address_type[i], ind_levels, insn);
2883 recog_data.operand[i] = *recog_data.operand_loc[i];
2884 substed_operand[i] = recog_data.operand[i];
2886 else if (code == SUBREG)
2888 rtx reg = SUBREG_REG (recog_data.operand[i]);
2889 rtx op
2890 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2891 ind_levels,
2892 set != 0
2893 && &SET_DEST (set) == recog_data.operand_loc[i],
2894 insn,
2895 &address_reloaded[i]);
2897 /* If we made a MEM to load (a part of) the stack slot of a pseudo
2898 that didn't get a hard register, emit a USE with a REG_EQUAL
2899 note in front so that we might inherit a previous, possibly
2900 wider reload. */
2902 if (replace
2903 && MEM_P (op)
2904 && REG_P (reg)
2905 && (GET_MODE_SIZE (GET_MODE (reg))
2906 >= GET_MODE_SIZE (GET_MODE (op)))
2907 && reg_equiv_constant (REGNO (reg)) == 0)
2908 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2909 insn),
2910 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2912 substed_operand[i] = recog_data.operand[i] = op;
2914 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2915 /* We can get a PLUS as an "operand" as a result of register
2916 elimination. See eliminate_regs and gen_reload. We handle
2917 a unary operator by reloading the operand. */
2918 substed_operand[i] = recog_data.operand[i]
2919 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2920 ind_levels, 0, insn,
2921 &address_reloaded[i]);
2922 else if (code == REG)
2924 /* This is equivalent to calling find_reloads_toplev.
2925 The code is duplicated for speed.
2926 When we find a pseudo always equivalent to a constant,
2927 we replace it by the constant. We must be sure, however,
2928 that we don't try to replace it in the insn in which it
2929 is being set. */
2930 int regno = REGNO (recog_data.operand[i]);
2931 if (reg_equiv_constant (regno) != 0
2932 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2934 /* Record the existing mode so that the check whether constants are
2935 allowed will work when operand_mode isn't specified. */
2937 if (operand_mode[i] == VOIDmode)
2938 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2940 substed_operand[i] = recog_data.operand[i]
2941 = reg_equiv_constant (regno);
2943 if (reg_equiv_memory_loc (regno) != 0
2944 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2945 /* We need not give a valid is_set_dest argument since the case
2946 of a constant equivalence was checked above. */
2947 substed_operand[i] = recog_data.operand[i]
2948 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2949 ind_levels, 0, insn,
2950 &address_reloaded[i]);
2952 /* If the operand is still a register (we didn't replace it with an
2953 equivalent), get the preferred class to reload it into. */
2954 code = GET_CODE (recog_data.operand[i]);
2955 preferred_class[i]
2956 = ((code == REG && REGNO (recog_data.operand[i])
2957 >= FIRST_PSEUDO_REGISTER)
2958 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2959 : NO_REGS);
2960 pref_or_nothing[i]
2961 = (code == REG
2962 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2963 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2966 /* If this is simply a copy from operand 1 to operand 0, merge the
2967 preferred classes for the operands. */
2968 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2969 && recog_data.operand[1] == SET_SRC (set))
2971 preferred_class[0] = preferred_class[1]
2972 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2973 pref_or_nothing[0] |= pref_or_nothing[1];
2974 pref_or_nothing[1] |= pref_or_nothing[0];
2977 /* Now see what we need for pseudo-regs that didn't get hard regs
2978 or got the wrong kind of hard reg. For this, we must consider
2979 all the operands together against the register constraints. */
2981 best = MAX_RECOG_OPERANDS * 2 + 600;
2983 goal_alternative_swapped = 0;
2985 /* The constraints are made of several alternatives.
2986 Each operand's constraint looks like foo,bar,... with commas
2987 separating the alternatives. The first alternatives for all
2988 operands go together, the second alternatives go together, etc.
2990 First loop over alternatives. */
2992 alternative_mask enabled = get_enabled_alternatives (insn);
2993 for (this_alternative_number = 0;
2994 this_alternative_number < n_alternatives;
2995 this_alternative_number++)
2997 int swapped;
2999 if (!TEST_BIT (enabled, this_alternative_number))
3001 int i;
3003 for (i = 0; i < recog_data.n_operands; i++)
3004 constraints[i] = skip_alternative (constraints[i]);
3006 continue;
3009 /* If insn is commutative (it's safe to exchange a certain pair
3010 of operands) then we need to try each alternative twice, the
3011 second time matching those two operands as if we had
3012 exchanged them. To do this, really exchange them in
3013 operands. */
3014 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3016 /* Loop over operands for one constraint alternative. */
3017 /* LOSERS counts those that don't fit this alternative
3018 and would require loading. */
3019 int losers = 0;
3020 /* BAD is set to 1 if some operand can't fit this alternative
3021 even after reloading. */
3022 int bad = 0;
3023 /* REJECT is a count of how undesirable this alternative says it is
3024 if any reloading is required. If the alternative matches exactly
3025 then REJECT is ignored, but otherwise it gets this much
3026 counted against it in addition to the reloading needed. Each
3027 ? counts three times here since we want the disparagement caused by
3028 a bad register class to count only 1/3 as much. */
3029 int reject = 0;
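/* For instance, in the constraint scan below a '?' adds 6 to REJECT and
   a '!' sets it to 600; a '!' alternative is meant to be chosen only when
   it matches without any reloading at all.  */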
3031 if (swapped)
3033 recog_data.operand[commutative] = substed_operand[commutative + 1];
3034 recog_data.operand[commutative + 1] = substed_operand[commutative];
3035 /* Swap the duplicates too. */
3036 for (i = 0; i < recog_data.n_dups; i++)
3037 if (recog_data.dup_num[i] == commutative
3038 || recog_data.dup_num[i] == commutative + 1)
3039 *recog_data.dup_loc[i]
3040 = recog_data.operand[(int) recog_data.dup_num[i]];
3042 std::swap (preferred_class[commutative],
3043 preferred_class[commutative + 1]);
3044 std::swap (pref_or_nothing[commutative],
3045 pref_or_nothing[commutative + 1]);
3046 std::swap (address_reloaded[commutative],
3047 address_reloaded[commutative + 1]);
3050 this_earlyclobber = 0;
3052 for (i = 0; i < noperands; i++)
3054 const char *p = constraints[i];
3055 char *end;
3056 int len;
3057 int win = 0;
3058 int did_match = 0;
3059 /* 0 => this operand can be reloaded somehow for this alternative. */
3060 int badop = 1;
3061 /* 0 => this operand can be reloaded if the alternative allows regs. */
3062 int winreg = 0;
3063 int c;
3064 int m;
3065 rtx operand = recog_data.operand[i];
3066 int offset = 0;
3067 /* Nonzero means this is a MEM that must be reloaded into a reg
3068 regardless of what the constraint says. */
3069 int force_reload = 0;
3070 int offmemok = 0;
3071 /* Nonzero if a constant forced into memory would be OK for this
3072 operand. */
3073 int constmemok = 0;
3074 int earlyclobber = 0;
3075 enum constraint_num cn;
3076 enum reg_class cl;
3078 /* If the predicate accepts a unary operator, it means that
3079 we need to reload the operand, but do not do this for
3080 match_operator and friends. */
3081 if (UNARY_P (operand) && *p != 0)
3082 operand = XEXP (operand, 0);
3084 /* If the operand is a SUBREG, extract
3085 the REG or MEM (or maybe even a constant) within.
3086 (Constants can occur as a result of reg_equiv_constant.) */
3088 while (GET_CODE (operand) == SUBREG)
3090 /* Offset only matters when operand is a REG and
3091 it is a hard reg. This is because it is passed
3092 to reg_fits_class_p if it is a REG and all pseudos
3093 return 0 from that function. */
3094 if (REG_P (SUBREG_REG (operand))
3095 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3097 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3098 GET_MODE (SUBREG_REG (operand)),
3099 SUBREG_BYTE (operand),
3100 GET_MODE (operand)) < 0)
3101 force_reload = 1;
3102 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3103 GET_MODE (SUBREG_REG (operand)),
3104 SUBREG_BYTE (operand),
3105 GET_MODE (operand));
3107 operand = SUBREG_REG (operand);
3108 /* Force reload if this is a constant or PLUS or if there may
3109 be a problem accessing OPERAND in the outer mode. */
3110 scalar_int_mode inner_mode;
3111 if (CONSTANT_P (operand)
3112 || GET_CODE (operand) == PLUS
3113 /* We must force a reload of paradoxical SUBREGs
3114 of a MEM because the alignment of the inner value
3115 may not be enough to do the outer reference. On
3116 big-endian machines, it may also reference outside
3117 the object.
3119 On machines that extend byte operations, if we have a
3120 SUBREG where both the inner and outer modes are no wider
3121 than a word, and the inner mode is narrower, integral,
3122 and gets extended when loaded from memory, combine.c has
3123 made assumptions about the behavior of the machine in such
3124 a register access. If the data is, in fact, in memory we
3125 must always load using the size assumed to be in the
3126 register and let the insn do the different-sized
3127 accesses.
3129 This is doubly true if WORD_REGISTER_OPERATIONS. In
3130 this case eliminate_regs has left non-paradoxical
3131 subregs for push_reload to see. Make sure it does
3132 by forcing the reload.
3134 ??? When is it right at this stage to have a subreg
3135 of a mem that is _not_ to be handled specially? IMO
3136 those should have been reduced to just a mem. */
3137 || ((MEM_P (operand)
3138 || (REG_P (operand)
3139 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3140 && (WORD_REGISTER_OPERATIONS
3141 || ((GET_MODE_BITSIZE (GET_MODE (operand))
3142 < BIGGEST_ALIGNMENT)
3143 && paradoxical_subreg_p (operand_mode[i],
3144 GET_MODE (operand)))
3145 || BYTES_BIG_ENDIAN
3146 || ((GET_MODE_SIZE (operand_mode[i])
3147 <= UNITS_PER_WORD)
3148 && (is_a <scalar_int_mode>
3149 (GET_MODE (operand), &inner_mode))
3150 && (GET_MODE_SIZE (inner_mode)
3151 <= UNITS_PER_WORD)
3152 && paradoxical_subreg_p (operand_mode[i],
3153 inner_mode)
3154 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3156 force_reload = 1;
3159 this_alternative[i] = NO_REGS;
3160 this_alternative_win[i] = 0;
3161 this_alternative_match_win[i] = 0;
3162 this_alternative_offmemok[i] = 0;
3163 this_alternative_earlyclobber[i] = 0;
3164 this_alternative_matches[i] = -1;
3166 /* An empty constraint or empty alternative
3167 allows anything which matched the pattern. */
3168 if (*p == 0 || *p == ',')
3169 win = 1, badop = 0;
3171 /* Scan this alternative's specs for this operand;
3172 set WIN if the operand fits any letter in this alternative.
3173 Otherwise, clear BADOP if this operand could
3174 fit some letter after reloads,
3175 or set WINREG if this operand could fit after reloads
3176 provided the constraint allows some registers. */
3179 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3181 case '\0':
3182 len = 0;
3183 break;
3184 case ',':
3185 c = '\0';
3186 break;
3188 case '?':
3189 reject += 6;
3190 break;
3192 case '!':
3193 reject = 600;
3194 break;
3196 case '#':
3197 /* Ignore rest of this alternative as far as
3198 reloading is concerned. */
3199 do
3200 p++;
3201 while (*p && *p != ',');
3202 len = 0;
3203 break;
3205 case '0': case '1': case '2': case '3': case '4':
3206 case '5': case '6': case '7': case '8': case '9':
3207 m = strtoul (p, &end, 10);
3208 p = end;
3209 len = 0;
3211 this_alternative_matches[i] = m;
3212 /* We are supposed to match a previous operand.
3213 If we do, we win if that one did.
3214 If we do not, count both of the operands as losers.
3215 (This is too conservative, since most of the time
3216 only a single reload insn will be needed to make
3217 the two operands win. As a result, this alternative
3218 may be rejected when it is actually desirable.) */
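/* Concretely (hypothetical pseudos): with a "0" constraint tying operand 1
   to operand 0 in (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4))),
   the operands do not match, so both are counted as losers even though a
   single reload register could serve the pair; the find_dummy_reload call
   below compensates for that when it can prove sharing is possible.  */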
3219 if ((swapped && (m != commutative || i != commutative + 1))
3220 /* If we are matching as if two operands were swapped,
3221 also pretend that operands_match had been computed
3222 with swapped.
3223 But if I is the second of those and M is the first,
3224 don't exchange them, because operands_match is valid
3225 only on one side of its diagonal. */
3226 ? (operands_match
3227 [(m == commutative || m == commutative + 1)
3228 ? 2 * commutative + 1 - m : m]
3229 [(i == commutative || i == commutative + 1)
3230 ? 2 * commutative + 1 - i : i])
3231 : operands_match[m][i])
3233 /* If we are matching a non-offsettable address where an
3234 offsettable address was expected, then we must reject
3235 this combination, because we can't reload it. */
3236 if (this_alternative_offmemok[m]
3237 && MEM_P (recog_data.operand[m])
3238 && this_alternative[m] == NO_REGS
3239 && ! this_alternative_win[m])
3240 bad = 1;
3242 did_match = this_alternative_win[m];
3244 else
3246 /* Operands don't match. */
3247 rtx value;
3248 int loc1, loc2;
3249 /* Retroactively mark the operand we had to match
3250 as a loser, if it wasn't already. */
3251 if (this_alternative_win[m])
3252 losers++;
3253 this_alternative_win[m] = 0;
3254 if (this_alternative[m] == NO_REGS)
3255 bad = 1;
3256 /* But count the pair only once in the total badness of
3257 this alternative, if the pair can be a dummy reload.
3258 The pointers in operand_loc are not swapped; swap
3259 them by hand if necessary. */
3260 if (swapped && i == commutative)
3261 loc1 = commutative + 1;
3262 else if (swapped && i == commutative + 1)
3263 loc1 = commutative;
3264 else
3265 loc1 = i;
3266 if (swapped && m == commutative)
3267 loc2 = commutative + 1;
3268 else if (swapped && m == commutative + 1)
3269 loc2 = commutative;
3270 else
3271 loc2 = m;
3272 value
3273 = find_dummy_reload (recog_data.operand[i],
3274 recog_data.operand[m],
3275 recog_data.operand_loc[loc1],
3276 recog_data.operand_loc[loc2],
3277 operand_mode[i], operand_mode[m],
3278 this_alternative[m], -1,
3279 this_alternative_earlyclobber[m]);
3281 if (value != 0)
3282 losers--;
3284 /* This can be fixed with reloads if the operand
3285 we are supposed to match can be fixed with reloads. */
3286 badop = 0;
3287 this_alternative[i] = this_alternative[m];
3289 /* If we have to reload this operand and some previous
3290 operand also had to match the same thing as this
3291 operand, we don't know how to do that. So reject this
3292 alternative. */
3293 if (! did_match || force_reload)
3294 for (j = 0; j < i; j++)
3295 if (this_alternative_matches[j]
3296 == this_alternative_matches[i])
3298 badop = 1;
3299 break;
3301 break;
3303 case 'p':
3304 /* All necessary reloads for an address_operand
3305 were handled in find_reloads_address. */
3306 this_alternative[i]
3307 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3308 ADDRESS, SCRATCH);
3309 win = 1;
3310 badop = 0;
3311 break;
3313 case TARGET_MEM_CONSTRAINT:
3314 if (force_reload)
3315 break;
3316 if (MEM_P (operand)
3317 || (REG_P (operand)
3318 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3319 && reg_renumber[REGNO (operand)] < 0))
3320 win = 1;
3321 if (CONST_POOL_OK_P (operand_mode[i], operand))
3322 badop = 0;
3323 constmemok = 1;
3324 break;
3326 case '<':
3327 if (MEM_P (operand)
3328 && ! address_reloaded[i]
3329 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3330 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3331 win = 1;
3332 break;
3334 case '>':
3335 if (MEM_P (operand)
3336 && ! address_reloaded[i]
3337 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3338 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3339 win = 1;
3340 break;
3342 /* Memory operand whose address is not offsettable. */
3343 case 'V':
3344 if (force_reload)
3345 break;
3346 if (MEM_P (operand)
3347 && ! (ind_levels ? offsettable_memref_p (operand)
3348 : offsettable_nonstrict_memref_p (operand))
3349 /* Certain mem addresses will become offsettable
3350 after they themselves are reloaded. This is important;
3351 we don't want our own handling of unoffsettables
3352 to override the handling of reg_equiv_address. */
3353 && !(REG_P (XEXP (operand, 0))
3354 && (ind_levels == 0
3355 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3356 win = 1;
3357 break;
3359 /* Memory operand whose address is offsettable. */
3360 case 'o':
3361 if (force_reload)
3362 break;
3363 if ((MEM_P (operand)
3364 /* If IND_LEVELS, find_reloads_address won't reload a
3365 pseudo that didn't get a hard reg, so we have to
3366 reject that case. */
3367 && ((ind_levels ? offsettable_memref_p (operand)
3368 : offsettable_nonstrict_memref_p (operand))
3369 /* A reloaded address is offsettable because it is now
3370 just a simple register indirect. */
3371 || address_reloaded[i] == 1))
3372 || (REG_P (operand)
3373 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3374 && reg_renumber[REGNO (operand)] < 0
3375 /* If reg_equiv_address is nonzero, we will be
3376 loading it into a register; hence it will be
3377 offsettable, but we cannot say that reg_equiv_mem
3378 is offsettable without checking. */
3379 && ((reg_equiv_mem (REGNO (operand)) != 0
3380 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3381 || (reg_equiv_address (REGNO (operand)) != 0))))
3382 win = 1;
3383 if (CONST_POOL_OK_P (operand_mode[i], operand)
3384 || MEM_P (operand))
3385 badop = 0;
3386 constmemok = 1;
3387 offmemok = 1;
3388 break;
3390 case '&':
3391 /* Output operand that is stored before the need for the
3392 input operands (and their index registers) is over. */
3393 earlyclobber = 1, this_earlyclobber = 1;
3394 break;
3396 case 'X':
3397 force_reload = 0;
3398 win = 1;
3399 break;
3401 case 'g':
3402 if (! force_reload
3403 /* A PLUS is never a valid operand, but reload can make
3404 it from a register when eliminating registers. */
3405 && GET_CODE (operand) != PLUS
3406 /* A SCRATCH is not a valid operand. */
3407 && GET_CODE (operand) != SCRATCH
3408 && (! CONSTANT_P (operand)
3409 || ! flag_pic
3410 || LEGITIMATE_PIC_OPERAND_P (operand))
3411 && (GENERAL_REGS == ALL_REGS
3412 || !REG_P (operand)
3413 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3414 && reg_renumber[REGNO (operand)] < 0)))
3415 win = 1;
3416 cl = GENERAL_REGS;
3417 goto reg;
3419 default:
3420 cn = lookup_constraint (p);
3421 switch (get_constraint_type (cn))
3423 case CT_REGISTER:
3424 cl = reg_class_for_constraint (cn);
3425 if (cl != NO_REGS)
3426 goto reg;
3427 break;
3429 case CT_CONST_INT:
3430 if (CONST_INT_P (operand)
3431 && (insn_const_int_ok_for_constraint
3432 (INTVAL (operand), cn)))
3433 win = true;
3434 break;
3436 case CT_MEMORY:
3437 if (force_reload)
3438 break;
3439 if (constraint_satisfied_p (operand, cn))
3440 win = 1;
3441 /* If the address was already reloaded,
3442 we win as well. */
3443 else if (MEM_P (operand) && address_reloaded[i] == 1)
3444 win = 1;
3445 /* Likewise if the address will be reloaded because
3446 reg_equiv_address is nonzero. For reg_equiv_mem
3447 we have to check. */
3448 else if (REG_P (operand)
3449 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3450 && reg_renumber[REGNO (operand)] < 0
3451 && ((reg_equiv_mem (REGNO (operand)) != 0
3452 && (constraint_satisfied_p
3453 (reg_equiv_mem (REGNO (operand)),
3454 cn)))
3455 || (reg_equiv_address (REGNO (operand))
3456 != 0)))
3457 win = 1;
3459 /* If we didn't already win, we can reload
3460 constants via force_const_mem, and other
3461 MEMs by reloading the address like for 'o'. */
3462 if (CONST_POOL_OK_P (operand_mode[i], operand)
3463 || MEM_P (operand))
3464 badop = 0;
3465 constmemok = 1;
3466 offmemok = 1;
3467 break;
3469 case CT_SPECIAL_MEMORY:
3470 if (force_reload)
3471 break;
3472 if (constraint_satisfied_p (operand, cn))
3473 win = 1;
3474 /* Likewise if the address will be reloaded because
3475 reg_equiv_address is nonzero. For reg_equiv_mem
3476 we have to check. */
3477 else if (REG_P (operand)
3478 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3479 && reg_renumber[REGNO (operand)] < 0
3480 && reg_equiv_mem (REGNO (operand)) != 0
3481 && (constraint_satisfied_p
3482 (reg_equiv_mem (REGNO (operand)), cn)))
3483 win = 1;
3484 break;
3486 case CT_ADDRESS:
3487 if (constraint_satisfied_p (operand, cn))
3488 win = 1;
3490 /* If we didn't already win, we can reload
3491 the address into a base register. */
3492 this_alternative[i]
3493 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3494 ADDRESS, SCRATCH);
3495 badop = 0;
3496 break;
3498 case CT_FIXED_FORM:
3499 if (constraint_satisfied_p (operand, cn))
3500 win = 1;
3501 break;
3503 break;
3505 reg:
3506 this_alternative[i]
3507 = reg_class_subunion[this_alternative[i]][cl];
3508 if (GET_MODE (operand) == BLKmode)
3509 break;
3510 winreg = 1;
3511 if (REG_P (operand)
3512 && reg_fits_class_p (operand, this_alternative[i],
3513 offset, GET_MODE (recog_data.operand[i])))
3514 win = 1;
3515 break;
3517 while ((p += len), c);
3519 if (swapped == (commutative >= 0 ? 1 : 0))
3520 constraints[i] = p;
3522 /* If this operand could be handled with a reg,
3523 and some reg is allowed, then this operand can be handled. */
3524 if (winreg && this_alternative[i] != NO_REGS
3525 && (win || !class_only_fixed_regs[this_alternative[i]]))
3526 badop = 0;
3528 /* Record which operands fit this alternative. */
3529 this_alternative_earlyclobber[i] = earlyclobber;
3530 if (win && ! force_reload)
3531 this_alternative_win[i] = 1;
3532 else if (did_match && ! force_reload)
3533 this_alternative_match_win[i] = 1;
3534 else
3536 int const_to_mem = 0;
3538 this_alternative_offmemok[i] = offmemok;
3539 losers++;
3540 if (badop)
3541 bad = 1;
3542 /* Alternative loses if it has no regs for a reg operand. */
3543 if (REG_P (operand)
3544 && this_alternative[i] == NO_REGS
3545 && this_alternative_matches[i] < 0)
3546 bad = 1;
3548 /* If this is a constant that is reloaded into the desired
3549 class by copying it to memory first, count that as another
3550 reload. This is consistent with other code and is
3551 required to avoid choosing another alternative when
3552 the constant is moved into memory by this function on
3553 an early reload pass. Note that the test here is
3554 precisely the same as in the code below that calls
3555 force_const_mem. */
3556 if (CONST_POOL_OK_P (operand_mode[i], operand)
3557 && ((targetm.preferred_reload_class (operand,
3558 this_alternative[i])
3559 == NO_REGS)
3560 || no_input_reloads))
3562 const_to_mem = 1;
3563 if (this_alternative[i] != NO_REGS)
3564 losers++;
3567 /* Alternative loses if it requires a type of reload not
3568 permitted for this insn. We can always reload SCRATCH
3569 and objects with a REG_UNUSED note. */
3570 if (GET_CODE (operand) != SCRATCH
3571 && modified[i] != RELOAD_READ && no_output_reloads
3572 && ! find_reg_note (insn, REG_UNUSED, operand))
3573 bad = 1;
3574 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3575 && ! const_to_mem)
3576 bad = 1;
3578 /* If we can't reload this value at all, reject this
3579 alternative. Note that we could also lose due to
3580 LIMIT_RELOAD_CLASS, but we don't check that
3581 here. */
3583 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3585 if (targetm.preferred_reload_class (operand,
3586 this_alternative[i])
3587 == NO_REGS)
3588 reject = 600;
3590 if (operand_type[i] == RELOAD_FOR_OUTPUT
3591 && (targetm.preferred_output_reload_class (operand,
3592 this_alternative[i])
3593 == NO_REGS))
3594 reject = 600;
3597 /* We prefer to reload pseudos over reloading other things,
3598 since such reloads may be able to be eliminated later.
3599 If we are reloading a SCRATCH, we won't be generating any
3600 insns, just using a register, so it is also preferred.
3601 So bump REJECT in other cases. Don't do this in the
3602 case where we are forcing a constant into memory and
3603 it will then win, since we don't want a different
3604 alternative to match in that case. */
3605 if (! (REG_P (operand)
3606 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3607 && GET_CODE (operand) != SCRATCH
3608 && ! (const_to_mem && constmemok))
3609 reject += 2;
3611 /* Input reloads can be inherited more often than output
3612 reloads can be removed, so penalize output reloads. */
3613 if (operand_type[i] != RELOAD_FOR_INPUT
3614 && GET_CODE (operand) != SCRATCH)
3615 reject++;
3618 /* If this operand is a pseudo register that didn't get
3619 a hard reg and this alternative accepts some
3620 register, see if the class that we want is a subset
3621 of the preferred class for this register. If not,
3622 but it intersects that class, use the preferred class
3623 instead. If it does not intersect the preferred
3624 class, show that usage of this alternative should be
3625 discouraged; it will be discouraged more still if the
3626 register is `preferred or nothing'. We do this
3627 because it increases the chance of reusing our spill
3628 register in a later insn and avoiding a pair of
3629 memory stores and loads.
3631 Don't bother with this if this alternative will
3632 accept this operand.
3634 Don't do this for a multiword operand, since it is
3635 only a small win and has the risk of requiring more
3636 spill registers, which could cause a large loss.
3638 Don't do this if the preferred class has only one
3639 register because we might otherwise exhaust the
3640 class. */
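/* For example (class names are target-specific and purely
   illustrative): if this pseudo's preferred class is INDEX_REGS, a
   multi-register subclass of GENERAL_REGS on some targets, and the
   alternative asks for GENERAL_REGS, the preferred class is a subset
   of what we have, so this_alternative[i] is narrowed to INDEX_REGS
   below. If neither class contained the other, we would leave the
   class alone and bump REJECT instead. */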
3642 if (! win && ! did_match
3643 && this_alternative[i] != NO_REGS
3644 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3645 && reg_class_size [(int) preferred_class[i]] > 0
3646 && ! small_register_class_p (preferred_class[i]))
3648 if (! reg_class_subset_p (this_alternative[i],
3649 preferred_class[i]))
3651 /* Since we don't have a way of forming the intersection,
3652 we just do something special if the preferred class
3653 is a subset of the class we have; that's the most
3654 common case anyway. */
3655 if (reg_class_subset_p (preferred_class[i],
3656 this_alternative[i]))
3657 this_alternative[i] = preferred_class[i];
3658 else
3659 reject += (2 + 2 * pref_or_nothing[i]);
3664 /* Now see if any output operands that are marked "earlyclobber"
3665 in this alternative conflict with any input operands
3666 or any memory addresses. */
3668 for (i = 0; i < noperands; i++)
3669 if (this_alternative_earlyclobber[i]
3670 && (this_alternative_win[i] || this_alternative_match_win[i]))
3672 struct decomposition early_data;
3674 early_data = decompose (recog_data.operand[i]);
3676 gcc_assert (modified[i] != RELOAD_READ);
3678 if (this_alternative[i] == NO_REGS)
3680 this_alternative_earlyclobber[i] = 0;
3681 gcc_assert (this_insn_is_asm);
3682 error_for_asm (this_insn,
3683 "%<&%> constraint used with no register class");
3686 for (j = 0; j < noperands; j++)
3687 /* Is this an input operand or a memory ref? */
3688 if ((MEM_P (recog_data.operand[j])
3689 || modified[j] != RELOAD_WRITE)
3690 && j != i
3691 /* Ignore things like match_operator operands. */
3692 && !recog_data.is_operator[j]
3693 /* Don't count an input operand that is constrained to match
3694 the early clobber operand. */
3695 && ! (this_alternative_matches[j] == i
3696 && rtx_equal_p (recog_data.operand[i],
3697 recog_data.operand[j]))
3698 /* Is it altered by storing the earlyclobber operand? */
3699 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3700 early_data))
3702 /* If the output is in a non-empty few-regs class,
3703 it's costly to reload it, so reload the input instead. */
3704 if (small_register_class_p (this_alternative[i])
3705 && (REG_P (recog_data.operand[j])
3706 || GET_CODE (recog_data.operand[j]) == SUBREG))
3708 losers++;
3709 this_alternative_win[j] = 0;
3710 this_alternative_match_win[j] = 0;
3712 else
3713 break;
3715 /* If an earlyclobber operand conflicts with something,
3716 it must be reloaded, so request this and count the cost. */
3717 if (j != noperands)
3719 losers++;
3720 this_alternative_win[i] = 0;
3721 this_alternative_match_win[j] = 0;
3722 for (j = 0; j < noperands; j++)
3723 if (this_alternative_matches[j] == i
3724 && this_alternative_match_win[j])
3726 this_alternative_win[j] = 0;
3727 this_alternative_match_win[j] = 0;
3728 losers++;
3733 /* If one alternative accepts all the operands, no reload required,
3734 choose that alternative; don't consider the remaining ones. */
3735 if (losers == 0)
3737 /* Unswap these so that they are never swapped at `finish'. */
3738 if (swapped)
3740 recog_data.operand[commutative] = substed_operand[commutative];
3741 recog_data.operand[commutative + 1]
3742 = substed_operand[commutative + 1];
3744 for (i = 0; i < noperands; i++)
3746 goal_alternative_win[i] = this_alternative_win[i];
3747 goal_alternative_match_win[i] = this_alternative_match_win[i];
3748 goal_alternative[i] = this_alternative[i];
3749 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3750 goal_alternative_matches[i] = this_alternative_matches[i];
3751 goal_alternative_earlyclobber[i]
3752 = this_alternative_earlyclobber[i];
3754 goal_alternative_number = this_alternative_number;
3755 goal_alternative_swapped = swapped;
3756 goal_earlyclobber = this_earlyclobber;
3757 goto finish;
3760 /* REJECT, set by the ! and ? constraint characters and when a register
3761 would be reloaded into a non-preferred class, discourages the use of
3762 this alternative for a reload goal. REJECT is incremented by six
3763 for each ? and two for each non-preferred class. */
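/* For example, an alternative that needs reloads for two operands and
   carries one '?' scores 2 * 6 + 6 == 18 here, while an alternative
   marked '!' is effectively ruled out by its REJECT value of 600. */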
3764 losers = losers * 6 + reject;
3766 /* If this alternative can be made to work by reloading,
3767 and it needs less reloading than the others checked so far,
3768 record it as the chosen goal for reloading. */
3769 if (! bad)
3771 if (best > losers)
3773 for (i = 0; i < noperands; i++)
3775 goal_alternative[i] = this_alternative[i];
3776 goal_alternative_win[i] = this_alternative_win[i];
3777 goal_alternative_match_win[i]
3778 = this_alternative_match_win[i];
3779 goal_alternative_offmemok[i]
3780 = this_alternative_offmemok[i];
3781 goal_alternative_matches[i] = this_alternative_matches[i];
3782 goal_alternative_earlyclobber[i]
3783 = this_alternative_earlyclobber[i];
3785 goal_alternative_swapped = swapped;
3786 best = losers;
3787 goal_alternative_number = this_alternative_number;
3788 goal_earlyclobber = this_earlyclobber;
3792 if (swapped)
3794 /* If the commutative operands have been swapped, swap
3795 them back in order to check the next alternative. */
3796 recog_data.operand[commutative] = substed_operand[commutative];
3797 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3798 /* Unswap the duplicates too. */
3799 for (i = 0; i < recog_data.n_dups; i++)
3800 if (recog_data.dup_num[i] == commutative
3801 || recog_data.dup_num[i] == commutative + 1)
3802 *recog_data.dup_loc[i]
3803 = recog_data.operand[(int) recog_data.dup_num[i]];
3805 /* Unswap the operand related information as well. */
3806 std::swap (preferred_class[commutative],
3807 preferred_class[commutative + 1]);
3808 std::swap (pref_or_nothing[commutative],
3809 pref_or_nothing[commutative + 1]);
3810 std::swap (address_reloaded[commutative],
3811 address_reloaded[commutative + 1]);
3816 /* The operands don't meet the constraints.
3817 goal_alternative describes the alternative
3818 that we could reach by reloading the fewest operands.
3819 Reload so as to fit it. */
3821 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3823 /* No alternative works with reloads?? */
3824 if (insn_code_number >= 0)
3825 fatal_insn ("unable to generate reloads for:", insn);
3826 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3827 /* Avoid further trouble with this insn. */
3828 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3829 n_reloads = 0;
3830 return 0;
3833 /* Jump to `finish' from above if all operands are valid already.
3834 In that case, goal_alternative_win is all 1. */
3835 finish:
3837 /* Right now, for any pair of operands I and J that are required to match,
3838 with I < J,
3839 goal_alternative_matches[J] is I.
3840 Set up goal_alternative_matched as the inverse function:
3841 goal_alternative_matched[I] = J. */
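/* For example, if operand 2 was constrained to match operand 0 and
   still needs a reload, goal_alternative_matches[2] is 0, and the
   loop below records goal_alternative_matched[0] = 2. */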
3843 for (i = 0; i < noperands; i++)
3844 goal_alternative_matched[i] = -1;
3846 for (i = 0; i < noperands; i++)
3847 if (! goal_alternative_win[i]
3848 && goal_alternative_matches[i] >= 0)
3849 goal_alternative_matched[goal_alternative_matches[i]] = i;
3851 for (i = 0; i < noperands; i++)
3852 goal_alternative_win[i] |= goal_alternative_match_win[i];
3854 /* If the best alternative is with operands 1 and 2 swapped,
3855 consider them swapped before reporting the reloads. Update the
3856 operand numbers of any reloads already pushed. */
3858 if (goal_alternative_swapped)
3860 std::swap (substed_operand[commutative],
3861 substed_operand[commutative + 1]);
3862 std::swap (recog_data.operand[commutative],
3863 recog_data.operand[commutative + 1]);
3864 std::swap (*recog_data.operand_loc[commutative],
3865 *recog_data.operand_loc[commutative + 1]);
3867 for (i = 0; i < recog_data.n_dups; i++)
3868 if (recog_data.dup_num[i] == commutative
3869 || recog_data.dup_num[i] == commutative + 1)
3870 *recog_data.dup_loc[i]
3871 = recog_data.operand[(int) recog_data.dup_num[i]];
3873 for (i = 0; i < n_reloads; i++)
3875 if (rld[i].opnum == commutative)
3876 rld[i].opnum = commutative + 1;
3877 else if (rld[i].opnum == commutative + 1)
3878 rld[i].opnum = commutative;
3882 for (i = 0; i < noperands; i++)
3884 operand_reloadnum[i] = -1;
3886 /* If this is an earlyclobber operand, we need to widen the scope.
3887 The reload must remain valid from the start of the insn being
3888 reloaded until after the operand is stored into its destination.
3889 We approximate this with RELOAD_OTHER even though we know that we
3890 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3892 One special case that is worth checking is when we have an
3893 output that is earlyclobber but isn't used past the insn (typically
3894 a SCRATCH). In this case, we need only have the reload live
3895 through the insn itself, but not for any of our input or output
3896 reloads.
3897 But we must not accidentally narrow the scope of an existing
3898 RELOAD_OTHER reload - leave these alone.
3900 In any case, any reloads needed to address this operand can
3901 remain categorized however they were before. */
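/* For example, an earlyclobber SCRATCH output that carries a
   REG_UNUSED note only needs its reload live through the insn itself,
   so it becomes RELOAD_FOR_INSN; any other earlyclobber output not
   already RELOAD_OTHER is widened to RELOAD_OTHER. */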
3903 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3904 operand_type[i]
3905 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3906 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3909 /* Any constants that aren't allowed and can't be reloaded
3910 into registers are here changed into memory references. */
3911 for (i = 0; i < noperands; i++)
3912 if (! goal_alternative_win[i])
3914 rtx op = recog_data.operand[i];
3915 rtx subreg = NULL_RTX;
3916 rtx plus = NULL_RTX;
3917 machine_mode mode = operand_mode[i];
3919 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3920 push_reload so we have to let them pass here. */
3921 if (GET_CODE (op) == SUBREG)
3923 subreg = op;
3924 op = SUBREG_REG (op);
3925 mode = GET_MODE (op);
3928 if (GET_CODE (op) == PLUS)
3930 plus = op;
3931 op = XEXP (op, 1);
3934 if (CONST_POOL_OK_P (mode, op)
3935 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3936 == NO_REGS)
3937 || no_input_reloads))
3939 int this_address_reloaded;
3940 rtx tem = force_const_mem (mode, op);
3942 /* If we stripped a SUBREG or a PLUS above add it back. */
3943 if (plus != NULL_RTX)
3944 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3946 if (subreg != NULL_RTX)
3947 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3949 this_address_reloaded = 0;
3950 substed_operand[i] = recog_data.operand[i]
3951 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3952 0, insn, &this_address_reloaded);
3954 /* If the alternative accepts constant pool refs directly
3955 there will be no reload needed at all. */
3956 if (plus == NULL_RTX
3957 && subreg == NULL_RTX
3958 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3959 ? substed_operand[i]
3960 : NULL,
3961 recog_data.constraints[i],
3962 goal_alternative_number))
3963 goal_alternative_win[i] = 1;
3967 /* Record the values of the earlyclobber operands for the caller. */
3968 if (goal_earlyclobber)
3969 for (i = 0; i < noperands; i++)
3970 if (goal_alternative_earlyclobber[i])
3971 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3973 /* Now record reloads for all the operands that need them. */
3974 for (i = 0; i < noperands; i++)
3975 if (! goal_alternative_win[i])
3977 /* Operands that match previous ones have already been handled. */
3978 if (goal_alternative_matches[i] >= 0)
3980 /* Handle an operand with a nonoffsettable address
3981 appearing where an offsettable address will do
3982 by reloading the address into a base register.
3984 ??? We can also do this when the operand is a register and
3985 reg_equiv_mem is not offsettable, but this is a bit tricky,
3986 so we don't bother with it. It may not be worth doing. */
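/* For instance, on many targets an "o" operand whose address is
   currently (plus (reg base) (reg index)) is not offsettable; the
   branch below reloads that address into a single base register so
   that the resulting (mem (reg base)) is offsettable. */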
3987 else if (goal_alternative_matched[i] == -1
3988 && goal_alternative_offmemok[i]
3989 && MEM_P (recog_data.operand[i]))
3991 /* If the address to be reloaded is a VOIDmode constant,
3992 use the default address mode as mode of the reload register,
3993 as would have been done by find_reloads_address. */
3994 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3995 machine_mode address_mode;
3997 address_mode = get_address_mode (recog_data.operand[i]);
3998 operand_reloadnum[i]
3999 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4000 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4001 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4002 address_mode,
4003 VOIDmode, 0, 0, i, RELOAD_OTHER);
4004 rld[operand_reloadnum[i]].inc
4005 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4007 /* If this operand is an output, we will have made any
4008 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4009 now we are treating part of the operand as an input, so
4010 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4012 if (modified[i] == RELOAD_WRITE)
4014 for (j = 0; j < n_reloads; j++)
4016 if (rld[j].opnum == i)
4018 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4019 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4020 else if (rld[j].when_needed
4021 == RELOAD_FOR_OUTADDR_ADDRESS)
4022 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4027 else if (goal_alternative_matched[i] == -1)
4029 operand_reloadnum[i]
4030 = push_reload ((modified[i] != RELOAD_WRITE
4031 ? recog_data.operand[i] : 0),
4032 (modified[i] != RELOAD_READ
4033 ? recog_data.operand[i] : 0),
4034 (modified[i] != RELOAD_WRITE
4035 ? recog_data.operand_loc[i] : 0),
4036 (modified[i] != RELOAD_READ
4037 ? recog_data.operand_loc[i] : 0),
4038 (enum reg_class) goal_alternative[i],
4039 (modified[i] == RELOAD_WRITE
4040 ? VOIDmode : operand_mode[i]),
4041 (modified[i] == RELOAD_READ
4042 ? VOIDmode : operand_mode[i]),
4043 (insn_code_number < 0 ? 0
4044 : insn_data[insn_code_number].operand[i].strict_low),
4045 0, i, operand_type[i]);
4047 /* In a matching pair of operands, one must be input only
4048 and the other must be output only.
4049 Pass the input operand as IN and the other as OUT. */
4050 else if (modified[i] == RELOAD_READ
4051 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4053 operand_reloadnum[i]
4054 = push_reload (recog_data.operand[i],
4055 recog_data.operand[goal_alternative_matched[i]],
4056 recog_data.operand_loc[i],
4057 recog_data.operand_loc[goal_alternative_matched[i]],
4058 (enum reg_class) goal_alternative[i],
4059 operand_mode[i],
4060 operand_mode[goal_alternative_matched[i]],
4061 0, 0, i, RELOAD_OTHER);
4062 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4064 else if (modified[i] == RELOAD_WRITE
4065 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4067 operand_reloadnum[goal_alternative_matched[i]]
4068 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4069 recog_data.operand[i],
4070 recog_data.operand_loc[goal_alternative_matched[i]],
4071 recog_data.operand_loc[i],
4072 (enum reg_class) goal_alternative[i],
4073 operand_mode[goal_alternative_matched[i]],
4074 operand_mode[i],
4075 0, 0, i, RELOAD_OTHER);
4076 operand_reloadnum[i] = output_reloadnum;
4078 else
4080 gcc_assert (insn_code_number < 0);
4081 error_for_asm (insn, "inconsistent operand constraints "
4082 "in an %<asm%>");
4083 /* Avoid further trouble with this insn. */
4084 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4085 n_reloads = 0;
4086 return 0;
4089 else if (goal_alternative_matched[i] < 0
4090 && goal_alternative_matches[i] < 0
4091 && address_operand_reloaded[i] != 1
4092 && optimize)
4094 /* For each non-matching operand that's a MEM or a pseudo-register
4095 that didn't get a hard register, make an optional reload.
4096 This may get done even if the insn needs no reloads otherwise. */
4098 rtx operand = recog_data.operand[i];
4100 while (GET_CODE (operand) == SUBREG)
4101 operand = SUBREG_REG (operand);
4102 if ((MEM_P (operand)
4103 || (REG_P (operand)
4104 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4105 /* If this is only for an output, the optional reload would not
4106 actually cause us to use a register now, just note that
4107 something is stored here. */
4108 && (goal_alternative[i] != NO_REGS
4109 || modified[i] == RELOAD_WRITE)
4110 && ! no_input_reloads
4111 An optional output reload might allow INSN to be deleted later.
4112 We must not make in-out reloads on insns that are not permitted
4113 to have output reloads.
4114 If this is an asm, we can't delete it; we must not even call
4115 push_reload for an optional output reload in this case,
4116 because we can't be sure that the constraint allows a register,
4117 and push_reload verifies the constraints for asms. */
4118 && (modified[i] == RELOAD_READ
4119 || (! no_output_reloads && ! this_insn_is_asm)))
4120 operand_reloadnum[i]
4121 = push_reload ((modified[i] != RELOAD_WRITE
4122 ? recog_data.operand[i] : 0),
4123 (modified[i] != RELOAD_READ
4124 ? recog_data.operand[i] : 0),
4125 (modified[i] != RELOAD_WRITE
4126 ? recog_data.operand_loc[i] : 0),
4127 (modified[i] != RELOAD_READ
4128 ? recog_data.operand_loc[i] : 0),
4129 (enum reg_class) goal_alternative[i],
4130 (modified[i] == RELOAD_WRITE
4131 ? VOIDmode : operand_mode[i]),
4132 (modified[i] == RELOAD_READ
4133 ? VOIDmode : operand_mode[i]),
4134 (insn_code_number < 0 ? 0
4135 : insn_data[insn_code_number].operand[i].strict_low),
4136 1, i, operand_type[i]);
4137 /* If a memory reference remains (either as a MEM or a pseudo that
4138 did not get a hard register), yet we can't make an optional
4139 reload, check if this is actually a pseudo register reference;
4140 we then need to emit a USE and/or a CLOBBER so that reload
4141 inheritance will do the right thing. */
4142 else if (replace
4143 && (MEM_P (operand)
4144 || (REG_P (operand)
4145 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4146 && reg_renumber [REGNO (operand)] < 0)))
4148 operand = *recog_data.operand_loc[i];
4150 while (GET_CODE (operand) == SUBREG)
4151 operand = SUBREG_REG (operand);
4152 if (REG_P (operand))
4154 if (modified[i] != RELOAD_WRITE)
4155 /* We mark the USE with QImode so that we recognize
4156 it as one that can be safely deleted at the end
4157 of reload. */
4158 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4159 insn), QImode);
4160 if (modified[i] != RELOAD_READ)
4161 emit_insn_after (gen_clobber (operand), insn);
4165 else if (goal_alternative_matches[i] >= 0
4166 && goal_alternative_win[goal_alternative_matches[i]]
4167 && modified[i] == RELOAD_READ
4168 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4169 && ! no_input_reloads && ! no_output_reloads
4170 && optimize)
4172 /* Similarly, make an optional reload for a pair of matching
4173 objects that are in MEM or a pseudo that didn't get a hard reg. */
4175 rtx operand = recog_data.operand[i];
4177 while (GET_CODE (operand) == SUBREG)
4178 operand = SUBREG_REG (operand);
4179 if ((MEM_P (operand)
4180 || (REG_P (operand)
4181 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4182 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4183 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4184 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4185 recog_data.operand[i],
4186 recog_data.operand_loc[goal_alternative_matches[i]],
4187 recog_data.operand_loc[i],
4188 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4189 operand_mode[goal_alternative_matches[i]],
4190 operand_mode[i],
4191 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4194 /* Perform whatever substitutions on the operands we are supposed
4195 to make due to commutativity or replacement of registers
4196 with equivalent constants or memory slots. */
4198 for (i = 0; i < noperands; i++)
4200 /* We only do this on the last pass through reload, because it is
4201 possible for some data (like reg_equiv_address) to be changed during
4202 later passes. Moreover, we lose the opportunity to get a useful
4203 reload_{in,out}_reg when we do these replacements. */
4205 if (replace)
4207 rtx substitution = substed_operand[i];
4209 *recog_data.operand_loc[i] = substitution;
4211 /* If we're replacing an operand with a LABEL_REF, we need to
4212 make sure that there's a REG_LABEL_OPERAND note attached to
4213 this instruction. */
4214 if (GET_CODE (substitution) == LABEL_REF
4215 && !find_reg_note (insn, REG_LABEL_OPERAND,
4216 label_ref_label (substitution))
4217 /* For a JUMP_P, if it was a branch target it must have
4218 already been recorded as such. */
4219 && (!JUMP_P (insn)
4220 || !label_is_jump_target_p (label_ref_label (substitution),
4221 insn)))
4223 add_reg_note (insn, REG_LABEL_OPERAND,
4224 label_ref_label (substitution));
4225 if (LABEL_P (label_ref_label (substitution)))
4226 ++LABEL_NUSES (label_ref_label (substitution));
4230 else
4231 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4234 /* If this insn pattern contains any MATCH_DUP's, make sure that
4235 they will be substituted if the operands they match are substituted.
4236 Also do now any substitutions we already did on the operands.
4238 Don't do this if we aren't making replacements because we might be
4239 propagating things allocated by frame pointer elimination into places
4240 it doesn't expect. */
4242 if (insn_code_number >= 0 && replace)
4243 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4245 int opno = recog_data.dup_num[i];
4246 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4247 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4250 #if 0
4251 /* This loses because reloading of prior insns can invalidate the equivalence
4252 (or at least find_equiv_reg isn't smart enough to find it any more),
4253 causing this insn to need more reload regs than it needed before.
4254 It may be too late to make the reload regs available.
4255 Now this optimization is done safely in choose_reload_regs. */
4257 /* For each reload of a reg into some other class of reg,
4258 search for an existing equivalent reg (same value now) in the right class.
4259 We can use it as long as we don't need to change its contents. */
4260 for (i = 0; i < n_reloads; i++)
4261 if (rld[i].reg_rtx == 0
4262 && rld[i].in != 0
4263 && REG_P (rld[i].in)
4264 && rld[i].out == 0)
4266 rld[i].reg_rtx
4267 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4268 static_reload_reg_p, 0, rld[i].inmode);
4269 /* Prevent generation of insn to load the value
4270 because the one we found already has the value. */
4271 if (rld[i].reg_rtx)
4272 rld[i].in = rld[i].reg_rtx;
4274 #endif
4276 /* If we detected error and replaced asm instruction by USE, forget about the
4277 reloads. */
4278 if (GET_CODE (PATTERN (insn)) == USE
4279 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4280 n_reloads = 0;
4282 /* Perhaps an output reload can be combined with another
4283 to reduce needs by one. */
4284 if (!goal_earlyclobber)
4285 combine_reloads ();
4287 /* If we have a pair of reloads for parts of an address, they are reloading
4288 the same object, the operands themselves were not reloaded, and they
4289 are for two operands that are supposed to match, merge the reloads and
4290 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4292 for (i = 0; i < n_reloads; i++)
4294 int k;
4296 for (j = i + 1; j < n_reloads; j++)
4297 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4299 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4300 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4301 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4302 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4303 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4304 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4305 && rtx_equal_p (rld[i].in, rld[j].in)
4306 && (operand_reloadnum[rld[i].opnum] < 0
4307 || rld[operand_reloadnum[rld[i].opnum]].optional)
4308 && (operand_reloadnum[rld[j].opnum] < 0
4309 || rld[operand_reloadnum[rld[j].opnum]].optional)
4310 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4311 || (goal_alternative_matches[rld[j].opnum]
4312 == rld[i].opnum)))
4314 for (k = 0; k < n_replacements; k++)
4315 if (replacements[k].what == j)
4316 replacements[k].what = i;
4318 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4319 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4320 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4321 else
4322 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4323 rld[j].in = 0;
4327 /* Scan all the reloads and update their type.
4328 If a reload is for the address of an operand and we didn't reload
4329 that operand, change the type. Similarly, change the operand number
4330 of a reload when two operands match. If a reload is optional, treat it
4331 as though the operand isn't reloaded.
4333 ??? This latter case is somewhat odd because if we do the optional
4334 reload, it means the object is hanging around. Thus we need only
4335 do the address reload if the optional reload was NOT done.
4337 Change secondary reloads to be the address type of their operand, not
4338 the normal type.
4340 If an operand's reload is now RELOAD_OTHER, change any
4341 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4342 RELOAD_FOR_OTHER_ADDRESS. */
4344 for (i = 0; i < n_reloads; i++)
4346 if (rld[i].secondary_p
4347 && rld[i].when_needed == operand_type[rld[i].opnum])
4348 rld[i].when_needed = address_type[rld[i].opnum];
4350 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4351 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4352 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4353 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4354 && (operand_reloadnum[rld[i].opnum] < 0
4355 || rld[operand_reloadnum[rld[i].opnum]].optional))
4357 /* If we have a secondary reload to go along with this reload,
4358 change its type to RELOAD_FOR_OPADDR_ADDR. */
4360 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4361 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4362 && rld[i].secondary_in_reload != -1)
4364 int secondary_in_reload = rld[i].secondary_in_reload;
4366 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4368 /* If there's a tertiary reload we have to change it also. */
4369 if (secondary_in_reload > 0
4370 && rld[secondary_in_reload].secondary_in_reload != -1)
4371 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4372 = RELOAD_FOR_OPADDR_ADDR;
4375 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4376 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4377 && rld[i].secondary_out_reload != -1)
4379 int secondary_out_reload = rld[i].secondary_out_reload;
4381 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4383 /* If there's a tertiary reload we have to change it also. */
4384 if (secondary_out_reload
4385 && rld[secondary_out_reload].secondary_out_reload != -1)
4386 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4387 = RELOAD_FOR_OPADDR_ADDR;
4390 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4391 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4392 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4393 else
4394 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4397 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4398 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4399 && operand_reloadnum[rld[i].opnum] >= 0
4400 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4401 == RELOAD_OTHER))
4402 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4404 if (goal_alternative_matches[rld[i].opnum] >= 0)
4405 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4408 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4409 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4410 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4412 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4413 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4414 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4415 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4416 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4417 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4418 This is complicated by the fact that a single operand can have more
4419 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4420 choose_reload_regs without affecting code quality, and cases that
4421 actually fail are extremely rare, so it turns out to be better to fix
4422 the problem here by not generating cases that choose_reload_regs will
4423 fail for. */
4424 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4425 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4426 a single operand.
4427 We can reduce the register pressure by exploiting that a
4428 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4429 does not conflict with any of them, if it is only used for the first of
4430 the RELOAD_FOR_X_ADDRESS reloads. */
4432 int first_op_addr_num = -2;
4433 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4434 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4435 int need_change = 0;
4436 We use first_op_addr_num and the contents of the above arrays
4437 first as flags: -2 means no instance encountered, -1 means exactly
4438 one instance encountered.
4439 If more than one instance has been encountered, we store the reload
4440 number of the first reload of the kind in question; reload numbers
4441 are known to be non-negative. */
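/* Concretely, after the backward scan below first_inpaddr_num[OP] is
   -2 if operand OP had no RELOAD_FOR_INPUT_ADDRESS reload, -1 if it
   had exactly one, and the index of the first (lowest-numbered) such
   reload if it had several, in which case need_change is also set. */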
4442 for (i = 0; i < noperands; i++)
4443 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4444 for (i = n_reloads - 1; i >= 0; i--)
4446 switch (rld[i].when_needed)
4448 case RELOAD_FOR_OPERAND_ADDRESS:
4449 if (++first_op_addr_num >= 0)
4451 first_op_addr_num = i;
4452 need_change = 1;
4454 break;
4455 case RELOAD_FOR_INPUT_ADDRESS:
4456 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4458 first_inpaddr_num[rld[i].opnum] = i;
4459 need_change = 1;
4461 break;
4462 case RELOAD_FOR_OUTPUT_ADDRESS:
4463 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4465 first_outpaddr_num[rld[i].opnum] = i;
4466 need_change = 1;
4468 break;
4469 default:
4470 break;
4474 if (need_change)
4476 for (i = 0; i < n_reloads; i++)
4478 int first_num;
4479 enum reload_type type;
4481 switch (rld[i].when_needed)
4483 case RELOAD_FOR_OPADDR_ADDR:
4484 first_num = first_op_addr_num;
4485 type = RELOAD_FOR_OPERAND_ADDRESS;
4486 break;
4487 case RELOAD_FOR_INPADDR_ADDRESS:
4488 first_num = first_inpaddr_num[rld[i].opnum];
4489 type = RELOAD_FOR_INPUT_ADDRESS;
4490 break;
4491 case RELOAD_FOR_OUTADDR_ADDRESS:
4492 first_num = first_outpaddr_num[rld[i].opnum];
4493 type = RELOAD_FOR_OUTPUT_ADDRESS;
4494 break;
4495 default:
4496 continue;
4498 if (first_num < 0)
4499 continue;
4500 else if (i > first_num)
4501 rld[i].when_needed = type;
4502 else
4504 /* Check if the only TYPE reload that uses reload I is
4505 reload FIRST_NUM. */
4506 for (j = n_reloads - 1; j > first_num; j--)
4508 if (rld[j].when_needed == type
4509 && (rld[i].secondary_p
4510 ? rld[j].secondary_in_reload == i
4511 : reg_mentioned_p (rld[i].in, rld[j].in)))
4513 rld[i].when_needed = type;
4514 break;
4522 /* See if we have any reloads that are now allowed to be merged
4523 because we've changed when the reload is needed to
4524 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4525 check for the most common cases. */
4527 for (i = 0; i < n_reloads; i++)
4528 if (rld[i].in != 0 && rld[i].out == 0
4529 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4530 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4531 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4532 for (j = 0; j < n_reloads; j++)
4533 if (i != j && rld[j].in != 0 && rld[j].out == 0
4534 && rld[j].when_needed == rld[i].when_needed
4535 && MATCHES (rld[i].in, rld[j].in)
4536 && rld[i].rclass == rld[j].rclass
4537 && !rld[i].nocombine && !rld[j].nocombine
4538 && rld[i].reg_rtx == rld[j].reg_rtx)
4540 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4541 transfer_replacements (i, j);
4542 rld[j].in = 0;
4545 /* If we made any reloads for addresses, see if they violate a
4546 "no input reloads" requirement for this insn. But loads that we
4547 do after the insn (such as for output addresses) are fine. */
4548 if (HAVE_cc0 && no_input_reloads)
4549 for (i = 0; i < n_reloads; i++)
4550 gcc_assert (rld[i].in == 0
4551 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4552 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4554 /* Compute reload_mode and reload_nregs. */
4555 for (i = 0; i < n_reloads; i++)
4557 rld[i].mode
4558 = (rld[i].inmode == VOIDmode
4559 || (GET_MODE_SIZE (rld[i].outmode)
4560 > GET_MODE_SIZE (rld[i].inmode)))
4561 ? rld[i].outmode : rld[i].inmode;
4563 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4566 /* Special case a simple move with an input reload and a
4567 destination of a hard reg, if the hard reg is ok, use it. */
4568 for (i = 0; i < n_reloads; i++)
4569 if (rld[i].when_needed == RELOAD_FOR_INPUT
4570 && GET_CODE (PATTERN (insn)) == SET
4571 && REG_P (SET_DEST (PATTERN (insn)))
4572 && (SET_SRC (PATTERN (insn)) == rld[i].in
4573 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4574 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4576 rtx dest = SET_DEST (PATTERN (insn));
4577 unsigned int regno = REGNO (dest);
4579 if (regno < FIRST_PSEUDO_REGISTER
4580 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4581 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4583 int nr = hard_regno_nregs[regno][rld[i].mode];
4584 int ok = 1, nri;
4586 for (nri = 1; nri < nr; nri ++)
4587 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4589 ok = 0;
4590 break;
4593 if (ok)
4594 rld[i].reg_rtx = dest;
4598 return retval;
4601 /* Return true if alternative number ALTNUM in constraint-string
4602 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4603 MEM gives the reference if its address hasn't been fully reloaded,
4604 otherwise it is NULL. */
4606 static bool
4607 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4608 const char *constraint, int altnum)
4610 int c;
4612 /* Skip alternatives before the one requested. */
4613 while (altnum > 0)
4615 while (*constraint++ != ',')
4617 altnum--;
4619 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4620 If one of them is present, this alternative accepts the result of
4621 passing a constant-pool reference through find_reloads_toplev.
4623 The same is true of extra memory constraints if the address
4624 was reloaded into a register. However, the target may elect
4625 to disallow the original constant address, forcing it to be
4626 reloaded into a register instead. */
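/* For instance, with CONSTRAINT "r,m" and ALTNUM 1 we scan only the
   "m" alternative; 'm' is a memory constraint, so the reference is
   accepted provided MEM, when given, satisfies that constraint. */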
4627 for (; (c = *constraint) && c != ',' && c != '#';
4628 constraint += CONSTRAINT_LEN (c, constraint))
4630 enum constraint_num cn = lookup_constraint (constraint);
4631 if (insn_extra_memory_constraint (cn)
4632 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4633 return true;
4635 return false;
4638 /* Scan X for memory references and scan the addresses for reloading.
4639 Also checks for references to "constant" regs that we want to eliminate
4640 and replaces them with the values they stand for.
4641 We may alter X destructively if it contains a reference to such.
4642 If X is just a constant reg, we return the equivalent value
4643 instead of X.
4645 IND_LEVELS says how many levels of indirect addressing this machine
4646 supports.
4648 OPNUM and TYPE identify the purpose of the reload.
4650 IS_SET_DEST is true if X is the destination of a SET, which is not
4651 appropriate to be replaced by a constant.
4653 INSN, if nonzero, is the insn in which we do the reload. It is used
4654 to determine if we may generate output reloads, and where to put USEs
4655 for pseudos that we have to replace with stack slots.
4657 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4658 result of find_reloads_address. */
4660 static rtx
4661 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4662 int ind_levels, int is_set_dest, rtx_insn *insn,
4663 int *address_reloaded)
4665 RTX_CODE code = GET_CODE (x);
4667 const char *fmt = GET_RTX_FORMAT (code);
4668 int i;
4669 int copied;
4671 if (code == REG)
4673 /* This code is duplicated for speed in find_reloads. */
4674 int regno = REGNO (x);
4675 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4676 x = reg_equiv_constant (regno);
4677 #if 0
4678 /* This creates (subreg (mem...)) which would cause an unnecessary
4679 reload of the mem. */
4680 else if (reg_equiv_mem (regno) != 0)
4681 x = reg_equiv_mem (regno);
4682 #endif
4683 else if (reg_equiv_memory_loc (regno)
4684 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4686 rtx mem = make_memloc (x, regno);
4687 if (reg_equiv_address (regno)
4688 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4690 /* If this is not a toplevel operand, find_reloads doesn't see
4691 this substitution. We have to emit a USE of the pseudo so
4692 that delete_output_reload can see it. */
4693 if (replace_reloads && recog_data.operand[opnum] != x)
4694 /* We mark the USE with QImode so that we recognize it
4695 as one that can be safely deleted at the end of
4696 reload. */
4697 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4698 QImode);
4699 x = mem;
4700 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4701 opnum, type, ind_levels, insn);
4702 if (!rtx_equal_p (x, mem))
4703 push_reg_equiv_alt_mem (regno, x);
4704 if (address_reloaded)
4705 *address_reloaded = i;
4708 return x;
4710 if (code == MEM)
4712 rtx tem = x;
4714 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4715 opnum, type, ind_levels, insn);
4716 if (address_reloaded)
4717 *address_reloaded = i;
4719 return tem;
4722 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4724 /* Check for SUBREG containing a REG that's equivalent to a
4725 constant. If the constant has a known value, truncate it
4726 right now. Similarly if we are extracting a single-word of a
4727 multi-word constant. If the constant is symbolic, allow it
4728 to be substituted normally. push_reload will strip the
4729 subreg later. The constant must not be VOIDmode, because we
4730 will lose the mode of the register (this should never happen
4731 because one of the cases above should handle it). */
4733 int regno = REGNO (SUBREG_REG (x));
4734 rtx tem;
4736 if (regno >= FIRST_PSEUDO_REGISTER
4737 && reg_renumber[regno] < 0
4738 && reg_equiv_constant (regno) != 0)
4740 tem =
4741 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4742 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4743 gcc_assert (tem);
4744 if (CONSTANT_P (tem)
4745 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4747 tem = force_const_mem (GET_MODE (x), tem);
4748 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4749 &XEXP (tem, 0), opnum, type,
4750 ind_levels, insn);
4751 if (address_reloaded)
4752 *address_reloaded = i;
4754 return tem;
4757 /* If the subreg contains a reg that will be converted to a mem,
4758 attempt to convert the whole subreg to a (narrower or wider)
4759 memory reference instead. If this succeeds, we're done --
4760 otherwise fall through to check whether the inner reg still
4761 needs address reloads anyway. */
4763 if (regno >= FIRST_PSEUDO_REGISTER
4764 && reg_equiv_memory_loc (regno) != 0)
4766 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4767 insn, address_reloaded);
4768 if (tem)
4769 return tem;
4773 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4775 if (fmt[i] == 'e')
4777 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4778 ind_levels, is_set_dest, insn,
4779 address_reloaded);
4780 /* If we have replaced a reg with its equivalent memory loc -
4781 that can still be handled here e.g. if it's in a paradoxical
4782 subreg - we must make the change in a copy, rather than using
4783 a destructive change. This way, find_reloads can still elect
4784 not to do the change. */
4785 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4787 x = shallow_copy_rtx (x);
4788 copied = 1;
4790 XEXP (x, i) = new_part;
4793 return x;
4796 /* Return a mem ref for the memory equivalent of reg REGNO.
4797 This mem ref is not shared with anything. */
4799 static rtx
4800 make_memloc (rtx ad, int regno)
4802 /* We must rerun eliminate_regs, in case the elimination
4803 offsets have changed. */
4804 rtx tem
4805 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4808 /* If TEM might contain a pseudo, we must copy it to avoid
4809 modifying it when we do the substitution for the reload. */
4810 if (rtx_varies_p (tem, 0))
4811 tem = copy_rtx (tem);
4813 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4814 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4816 /* Copy the result if it's still the same as the equivalence, to avoid
4817 modifying it when we do the substitution for the reload. */
4818 if (tem == reg_equiv_memory_loc (regno))
4819 tem = copy_rtx (tem);
4820 return tem;
4823 /* Returns true if AD could be turned into a valid memory reference
4824 to mode MODE in address space AS by reloading the part pointed to
4825 by PART into a register. */
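/* For example, if AD is (plus (reg fp) (symbol_ref "x")) and PART
   points at the symbol_ref, we temporarily replace it with a register
   rtx for a brand-new pseudo number and ask whether
   (plus (reg fp) (reg N)) would be a valid address for MODE in
   address space AS. */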
4827 static int
4828 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4829 addr_space_t as, rtx *part)
4831 int retv;
4832 rtx tem = *part;
4833 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4835 *part = reg;
4836 retv = memory_address_addr_space_p (mode, ad, as);
4837 *part = tem;
4839 return retv;
4842 /* Record all reloads needed for handling memory address AD
4843 which appears in *LOC in a memory reference to mode MODE
4844 which itself is found in location *MEMREFLOC.
4845 Note that we take shortcuts assuming that no multi-reg machine mode
4846 occurs as part of an address.
4848 OPNUM and TYPE specify the purpose of this reload.
4850 IND_LEVELS says how many levels of indirect addressing this machine
4851 supports.
4853 INSN, if nonzero, is the insn in which we do the reload. It is used
4854 to determine if we may generate output reloads, and where to put USEs
4855 for pseudos that we have to replace with stack slots.
4857 Value is one if this address is reloaded or replaced as a whole; it is
4858 zero if the top level of this address was not reloaded or replaced, and
4859 it is -1 if it may or may not have been reloaded or replaced.
4861 Note that there is no verification that the address will be valid after
4862 this routine does its work. Instead, we rely on the fact that the address
4863 was valid when reload started. So we need only undo things that reload
4864 could have broken. These are wrong register types, pseudos not allocated
4865 to a hard register, and frame pointer elimination. */
4867 static int
4868 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4869 rtx *loc, int opnum, enum reload_type type,
4870 int ind_levels, rtx_insn *insn)
4872 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4873 : ADDR_SPACE_GENERIC;
4874 int regno;
4875 int removed_and = 0;
4876 int op_index;
4877 rtx tem;
4879 /* If the address is a register, see if it is a legitimate address and
4880 reload if not. We first handle the cases where we need not reload
4881 or where we must reload in a non-standard way. */
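/* For example, a pseudo that received no hard register and has no
   equivalence at all falls through to the push_reload call below and
   the function returns 1; a hard register that is a valid base
   register and is not clobbered by this insn needs nothing and the
   function returns 0. */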
4883 if (REG_P (ad))
4885 regno = REGNO (ad);
4887 if (reg_equiv_constant (regno) != 0)
4889 find_reloads_address_part (reg_equiv_constant (regno), loc,
4890 base_reg_class (mode, as, MEM, SCRATCH),
4891 GET_MODE (ad), opnum, type, ind_levels);
4892 return 1;
4895 tem = reg_equiv_memory_loc (regno);
4896 if (tem != 0)
4898 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4900 tem = make_memloc (ad, regno);
4901 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4902 XEXP (tem, 0),
4903 MEM_ADDR_SPACE (tem)))
4905 rtx orig = tem;
4907 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4908 &XEXP (tem, 0), opnum,
4909 ADDR_TYPE (type), ind_levels, insn);
4910 if (!rtx_equal_p (tem, orig))
4911 push_reg_equiv_alt_mem (regno, tem);
4913 /* We can avoid a reload if the register's equivalent memory
4914 expression is valid as an indirect memory address.
4915 But not all addresses are valid in a mem used as an indirect
4916 address: only reg or reg+constant. */
4918 if (ind_levels > 0
4919 && strict_memory_address_addr_space_p (mode, tem, as)
4920 && (REG_P (XEXP (tem, 0))
4921 || (GET_CODE (XEXP (tem, 0)) == PLUS
4922 && REG_P (XEXP (XEXP (tem, 0), 0))
4923 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4925 /* TEM is not the same as what we'll be replacing the
4926 pseudo with after reload, put a USE in front of INSN
4927 in the final reload pass. */
4928 if (replace_reloads
4929 && num_not_at_initial_offset
4930 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4932 *loc = tem;
4933 /* We mark the USE with QImode so that we
4934 recognize it as one that can be safely
4935 deleted at the end of reload. */
4936 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4937 insn), QImode);
4939 /* This doesn't really count as replacing the address
4940 as a whole, since it is still a memory access. */
4942 return 0;
4944 ad = tem;
4948 /* The only remaining case where we can avoid a reload is if this is a
4949 hard register that is valid as a base register and which is not the
4950 subject of a CLOBBER in this insn. */
4952 else if (regno < FIRST_PSEUDO_REGISTER
4953 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4954 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4955 return 0;
4957 /* If we do not have one of the cases above, we must do the reload. */
4958 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4959 base_reg_class (mode, as, MEM, SCRATCH),
4960 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4961 return 1;
4964 if (strict_memory_address_addr_space_p (mode, ad, as))
4966 /* The address appears valid, so reloads are not needed.
4967 But the address may contain an eliminable register.
4968 This can happen because a machine with indirect addressing
4969 may consider a pseudo register by itself a valid address even when
4970 it has failed to get a hard reg.
4971 So do a tree-walk to find and eliminate all such regs. */
4973 /* But first quickly dispose of a common case. */
4974 if (GET_CODE (ad) == PLUS
4975 && CONST_INT_P (XEXP (ad, 1))
4976 && REG_P (XEXP (ad, 0))
4977 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4978 return 0;
4980 subst_reg_equivs_changed = 0;
4981 *loc = subst_reg_equivs (ad, insn);
4983 if (! subst_reg_equivs_changed)
4984 return 0;
4986 /* Check result for validity after substitution. */
4987 if (strict_memory_address_addr_space_p (mode, ad, as))
4988 return 0;
4991 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4994 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4996 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4997 ind_levels, win);
4999 break;
5000 win:
5001 *memrefloc = copy_rtx (*memrefloc);
5002 XEXP (*memrefloc, 0) = ad;
5003 move_replacements (&ad, &XEXP (*memrefloc, 0));
5004 return -1;
5006 while (0);
5007 #endif
5009 /* The address is not valid. We have to figure out why. First see if
5010 we have an outer AND and remove it if so. Then analyze what's inside. */
5012 if (GET_CODE (ad) == AND)
5014 removed_and = 1;
5015 loc = &XEXP (ad, 0);
5016 ad = *loc;
5019 /* One possibility for why the address is invalid is that it is itself
5020 a MEM. This can happen when the frame pointer is being eliminated, a
5021 pseudo is not allocated to a hard register, and the offset between the
5022 frame and stack pointers is not its initial value. In that case the
5023 pseudo will have been replaced by a MEM referring to the
5024 stack pointer. */
5025 if (MEM_P (ad))
5027 /* First ensure that the address in this MEM is valid. Then, unless
5028 indirect addresses are valid, reload the MEM into a register. */
5029 tem = ad;
5030 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5031 opnum, ADDR_TYPE (type),
5032 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5034 /* If tem was changed, then we must create a new memory reference to
5035 hold it and store it back into memrefloc. */
5036 if (tem != ad && memrefloc)
5038 *memrefloc = copy_rtx (*memrefloc);
5039 copy_replacements (tem, XEXP (*memrefloc, 0));
5040 loc = &XEXP (*memrefloc, 0);
5041 if (removed_and)
5042 loc = &XEXP (*loc, 0);
5045 /* Check similar cases as for indirect addresses as above except
5046 that we can allow pseudos and a MEM since they should have been
5047 taken care of above. */
5049 if (ind_levels == 0
5050 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5051 || MEM_P (XEXP (tem, 0))
5052 || ! (REG_P (XEXP (tem, 0))
5053 || (GET_CODE (XEXP (tem, 0)) == PLUS
5054 && REG_P (XEXP (XEXP (tem, 0), 0))
5055 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5057 /* Must use TEM here, not AD, since it is the one that will
5058 have any subexpressions reloaded, if needed. */
5059 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5060 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5061 VOIDmode, 0,
5062 0, opnum, type);
5063 return ! removed_and;
5065 else
5066 return 0;
5069 /* If we have address of a stack slot but it's not valid because the
5070 displacement is too large, compute the sum in a register.
5071 Handle all base registers here, not just fp/ap/sp, because on some
5072 targets (namely SH) we can also get too large displacements from
5073 big-endian corrections. */
5074 else if (GET_CODE (ad) == PLUS
5075 && REG_P (XEXP (ad, 0))
5076 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5077 && CONST_INT_P (XEXP (ad, 1))
5078 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5079 CONST_INT)
5080 /* Similarly, if we were to reload the base register and the
5081 mem+offset address is still invalid, then we want to reload
5082 the whole address, not just the base register. */
5083 || ! maybe_memory_address_addr_space_p
5084 (mode, ad, as, &(XEXP (ad, 0)))))
5087 /* Unshare the MEM rtx so we can safely alter it. */
5088 if (memrefloc)
5090 *memrefloc = copy_rtx (*memrefloc);
5091 loc = &XEXP (*memrefloc, 0);
5092 if (removed_and)
5093 loc = &XEXP (*loc, 0);
5096 if (double_reg_address_ok[mode]
5097 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5098 PLUS, CONST_INT))
5100 /* Unshare the sum as well. */
5101 *loc = ad = copy_rtx (ad);
5103 /* Reload the displacement into an index reg.
5104 We assume the frame pointer or arg pointer is a base reg. */
5105 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5106 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5107 type, ind_levels);
5108 return 0;
5110 else
5112 /* If the sum of two regs is not necessarily valid,
5113 reload the sum into a base reg.
5114 That will at least work. */
5115 find_reloads_address_part (ad, loc,
5116 base_reg_class (mode, as, MEM, SCRATCH),
5117 GET_MODE (ad), opnum, type, ind_levels);
5119 return ! removed_and;
5122 /* If we have an indexed stack slot, there are three possible reasons why
5123 it might be invalid: The index might need to be reloaded, the address
5124 might have been made by frame pointer elimination and hence have a
5125 constant out of range, or both reasons might apply.
5127 We can easily check for an index needing reload, but even if that is the
5128 case, we might also have an invalid constant. To avoid making the
5129 conservative assumption and requiring two reloads, we see if this address
5130 is valid when not interpreted strictly. If it is, the only problem is
5131 that the index needs a reload and find_reloads_address_1 will take care
5132 of it.
5134 Handle all base registers here, not just fp/ap/sp, because on some
5135 targets (namely SPARC) we can also get invalid addresses from preventive
5136 subreg big-endian corrections made by find_reloads_toplev. We
5137 can also get expressions involving LO_SUM (rather than PLUS) from
5138 find_reloads_subreg_address.
5140 If we decide to do something, it must be that `double_reg_address_ok'
5141 is true. We generate a reload of the base register + constant and
5142 rework the sum so that the reload register will be added to the index.
5143 This is safe because we know the address isn't shared.
5145 We check for the base register as both the first and second operand of
5146 the innermost PLUS and/or LO_SUM. */
5148 for (op_index = 0; op_index < 2; ++op_index)
5150 rtx operand, addend;
5151 enum rtx_code inner_code;
5153 if (GET_CODE (ad) != PLUS)
5154 continue;
5156 inner_code = GET_CODE (XEXP (ad, 0));
5157 if (!(GET_CODE (ad) == PLUS
5158 && CONST_INT_P (XEXP (ad, 1))
5159 && (inner_code == PLUS || inner_code == LO_SUM)))
5160 continue;
5162 operand = XEXP (XEXP (ad, 0), op_index);
5163 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5164 continue;
5166 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5168 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5169 GET_CODE (addend))
5170 || operand == frame_pointer_rtx
5171 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5172 && operand == hard_frame_pointer_rtx)
5173 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5174 && operand == arg_pointer_rtx)
5175 || operand == stack_pointer_rtx)
5176 && ! maybe_memory_address_addr_space_p
5177 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5179 rtx offset_reg;
5180 enum reg_class cls;
5182 offset_reg = plus_constant (GET_MODE (ad), operand,
5183 INTVAL (XEXP (ad, 1)));
5185 /* Form the adjusted address. */
5186 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5187 ad = gen_rtx_PLUS (GET_MODE (ad),
5188 op_index == 0 ? offset_reg : addend,
5189 op_index == 0 ? addend : offset_reg);
5190 else
5191 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5192 op_index == 0 ? offset_reg : addend,
5193 op_index == 0 ? addend : offset_reg);
5194 *loc = ad;
5196 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5197 find_reloads_address_part (XEXP (ad, op_index),
5198 &XEXP (ad, op_index), cls,
5199 GET_MODE (ad), opnum, type, ind_levels);
5200 find_reloads_address_1 (mode, as,
5201 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5202 GET_CODE (XEXP (ad, op_index)),
5203 &XEXP (ad, 1 - op_index), opnum,
5204 type, 0, insn);
5206 return 0;
5210 /* See if address becomes valid when an eliminable register
5211 in a sum is replaced. */
5213 tem = ad;
5214 if (GET_CODE (ad) == PLUS)
5215 tem = subst_indexed_address (ad);
5216 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5218 /* Ok, we win that way. Replace any additional eliminable
5219 registers. */
5221 subst_reg_equivs_changed = 0;
5222 tem = subst_reg_equivs (tem, insn);
5224 /* Make sure that didn't make the address invalid again. */
5226 if (! subst_reg_equivs_changed
5227 || strict_memory_address_addr_space_p (mode, tem, as))
5229 *loc = tem;
5230 return 0;
5234 /* If constants aren't valid addresses, reload the constant address
5235 into a register. */
5236 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5238 machine_mode address_mode = GET_MODE (ad);
5239 if (address_mode == VOIDmode)
5240 address_mode = targetm.addr_space.address_mode (as);
5242 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5243 Unshare it so we can safely alter it. */
5244 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5245 && CONSTANT_POOL_ADDRESS_P (ad))
5247 *memrefloc = copy_rtx (*memrefloc);
5248 loc = &XEXP (*memrefloc, 0);
5249 if (removed_and)
5250 loc = &XEXP (*loc, 0);
5253 find_reloads_address_part (ad, loc,
5254 base_reg_class (mode, as, MEM, SCRATCH),
5255 address_mode, opnum, type, ind_levels);
5256 return ! removed_and;
5259 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5260 opnum, type, ind_levels, insn);
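/* Editorial sketch of the "displacement too large" case handled above;
   the register numbers and the 16-bit range are hypothetical.  Suppose
   frame pointer elimination produced

     ad = (plus (reg/f 1 fp) (const_int 0x12340))

   on a target whose reg+offset addresses accept only 16-bit
   displacements.  If double_reg_address_ok holds for this mode and the
   frame pointer is a valid base here, only the displacement is reloaded,
   giving

     (plus (reg/f 1 fp) (reg 260))    ; (reg 260) <- (const_int 0x12340)

   otherwise the whole sum is reloaded into a single base register and
   the address becomes just that reload register.  */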
5263 /* Find all pseudo regs appearing in AD
5264 that are eliminable in favor of equivalent values
5265 and do not have hard regs; replace them by their equivalents.
5266 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5267 front of it for pseudos that we have to replace with stack slots. */
5269 static rtx
5270 subst_reg_equivs (rtx ad, rtx_insn *insn)
5272 RTX_CODE code = GET_CODE (ad);
5273 int i;
5274 const char *fmt;
5276 switch (code)
5278 case HIGH:
5279 case CONST:
5280 CASE_CONST_ANY:
5281 case SYMBOL_REF:
5282 case LABEL_REF:
5283 case PC:
5284 case CC0:
5285 return ad;
5287 case REG:
5289 int regno = REGNO (ad);
5291 if (reg_equiv_constant (regno) != 0)
5293 subst_reg_equivs_changed = 1;
5294 return reg_equiv_constant (regno);
5296 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5298 rtx mem = make_memloc (ad, regno);
5299 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5301 subst_reg_equivs_changed = 1;
5302 /* We mark the USE with QImode so that we recognize it
5303 as one that can be safely deleted at the end of
5304 reload. */
5305 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5306 QImode);
5307 return mem;
5311 return ad;
5313 case PLUS:
5314 /* Quickly dispose of a common case. */
5315 if (XEXP (ad, 0) == frame_pointer_rtx
5316 && CONST_INT_P (XEXP (ad, 1)))
5317 return ad;
5318 break;
5320 default:
5321 break;
5324 fmt = GET_RTX_FORMAT (code);
5325 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5326 if (fmt[i] == 'e')
5327 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5328 return ad;
5331 /* Compute the sum of X and Y, making canonicalizations assumed in an
5332 address, namely: sum constant integers, surround the sum of two
5333 constants with a CONST, put the constant as the second operand, and
5334 group the constant on the outermost sum.
5336 This routine assumes both inputs are already in canonical form. */
5339 form_sum (machine_mode mode, rtx x, rtx y)
5341 rtx tem;
5343 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5344 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5346 if (CONST_INT_P (x))
5347 return plus_constant (mode, y, INTVAL (x));
5348 else if (CONST_INT_P (y))
5349 return plus_constant (mode, x, INTVAL (y));
5350 else if (CONSTANT_P (x))
5351 tem = x, x = y, y = tem;
5353 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5354 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5356 /* Note that if the operands of Y are specified in the opposite
5357 order in the recursive calls below, infinite recursion will occur. */
5358 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5359 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5361 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5362 constant will have been placed second. */
5363 if (CONSTANT_P (x) && CONSTANT_P (y))
5365 if (GET_CODE (x) == CONST)
5366 x = XEXP (x, 0);
5367 if (GET_CODE (y) == CONST)
5368 y = XEXP (y, 0);
5370 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5373 return gen_rtx_PLUS (mode, x, y);
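/* Editorial examples of the canonical forms produced above; the register
   number and symbol are hypothetical:

     form_sum (Pmode, (reg 100), (const_int 4))
       => (plus (reg 100) (const_int 4))

     form_sum (Pmode, (plus (reg 100) (const_int 4)), (const_int 8))
       => (plus (reg 100) (const_int 12))

     form_sum (Pmode, (symbol_ref "x"), (const_int 4))
       => (const (plus (symbol_ref "x") (const_int 4)))  */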
5376 /* If ADDR is a sum containing a pseudo register that should be
5377 replaced with a constant (from reg_equiv_constant),
5378 return the result of doing so, and also apply the associative
5379 law so that the result is more likely to be a valid address.
5380 (But it is not guaranteed to be one.)
5382 Note that at most one register is replaced, even if more are
5383 replaceable. Also, we try to put the result into a canonical form
5384 so it is more likely to be a valid address.
5386 In all other cases, return ADDR. */
5388 static rtx
5389 subst_indexed_address (rtx addr)
5391 rtx op0 = 0, op1 = 0, op2 = 0;
5392 rtx tem;
5393 int regno;
5395 if (GET_CODE (addr) == PLUS)
5397 /* Try to find a register to replace. */
5398 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5399 if (REG_P (op0)
5400 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5401 && reg_renumber[regno] < 0
5402 && reg_equiv_constant (regno) != 0)
5403 op0 = reg_equiv_constant (regno);
5404 else if (REG_P (op1)
5405 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5406 && reg_renumber[regno] < 0
5407 && reg_equiv_constant (regno) != 0)
5408 op1 = reg_equiv_constant (regno);
5409 else if (GET_CODE (op0) == PLUS
5410 && (tem = subst_indexed_address (op0)) != op0)
5411 op0 = tem;
5412 else if (GET_CODE (op1) == PLUS
5413 && (tem = subst_indexed_address (op1)) != op1)
5414 op1 = tem;
5415 else
5416 return addr;
5418 /* Pick out up to three things to add. */
5419 if (GET_CODE (op1) == PLUS)
5420 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5421 else if (GET_CODE (op0) == PLUS)
5422 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5424 /* Compute the sum. */
5425 if (op2 != 0)
5426 op1 = form_sum (GET_MODE (addr), op1, op2);
5427 if (op1 != 0)
5428 op0 = form_sum (GET_MODE (addr), op0, op1);
5430 return op0;
5432 return addr;
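/* Editorial worked example; the pseudo number and constants are
   hypothetical.  If (reg 200) got no hard register and
   reg_equiv_constant gives it (const_int 16), then

     subst_indexed_address ((plus (plus (reg/f 1 fp) (reg 200))
                                  (const_int 4)))

   substitutes the constant for (reg 200) and reassociates via form_sum,
   yielding

     (plus (reg/f 1 fp) (const_int 20))

   which is much more likely to be a valid address.  */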
5435 /* Update the REG_INC notes for an insn. It updates all REG_INC
5436    notes for the instruction which refer to REGNO so that they refer
5437    to the reload number.
5439 INSN is the insn for which any REG_INC notes need updating.
5441 REGNO is the register number which has been reloaded.
5443 RELOADNUM is the reload number. */
5445 static void
5446 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5447 int reloadnum ATTRIBUTE_UNUSED)
5449 if (!AUTO_INC_DEC)
5450 return;
5452 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5453 if (REG_NOTE_KIND (link) == REG_INC
5454 && (int) REGNO (XEXP (link, 0)) == regno)
5455 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
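/* Editorial sketch; the register and reload numbers are hypothetical.
   If INSN carries a note

     (expr_list:REG_INC (reg 123) ...)

   and pseudo 123 has just been reloaded as reload number RELOADNUM, the
   loop above queues a replacement for the note's operand, so that once
   subst_reloads runs the note names the reload register instead of
   (reg 123).  */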
5458 /* Record the pseudo registers we must reload into hard registers in a
5459 subexpression of a would-be memory address, X referring to a value
5460 in mode MODE. (This function is not called if the address we find
5461 is strictly valid.)
5463 CONTEXT = 1 means we are considering regs as index regs,
5464 = 0 means we are considering them as base regs.
5465 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5466 or an autoinc code.
5467 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5468 is the code of the index part of the address. Otherwise, pass SCRATCH
5469 for this argument.
5470 OPNUM and TYPE specify the purpose of any reloads made.
5472 IND_LEVELS says how many levels of indirect addressing are
5473 supported at this point in the address.
5475 INSN, if nonzero, is the insn in which we do the reload. It is used
5476 to determine if we may generate output reloads.
5478 We return nonzero if X, as a whole, is reloaded or replaced. */
5480 /* Note that we take shortcuts assuming that no multi-reg machine mode
5481 occurs as part of an address.
5482 Also, this is not fully machine-customizable; it works for machines
5483 such as VAXen and 68000's and 32000's, but other possible machines
5484 could have addressing modes that this does not handle right.
5485 If you add push_reload calls here, you need to make sure gen_reload
5486 handles those cases gracefully. */
5488 static int
5489 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5490 rtx x, int context,
5491 enum rtx_code outer_code, enum rtx_code index_code,
5492 rtx *loc, int opnum, enum reload_type type,
5493 int ind_levels, rtx_insn *insn)
5495 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5496 ((CONTEXT) == 0 \
5497 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5498 : REGNO_OK_FOR_INDEX_P (REGNO))
5500 enum reg_class context_reg_class;
5501 RTX_CODE code = GET_CODE (x);
5502 bool reloaded_inner_of_autoinc = false;
5504 if (context == 1)
5505 context_reg_class = INDEX_REG_CLASS;
5506 else
5507 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5509 switch (code)
5511 case PLUS:
5513 rtx orig_op0 = XEXP (x, 0);
5514 rtx orig_op1 = XEXP (x, 1);
5515 RTX_CODE code0 = GET_CODE (orig_op0);
5516 RTX_CODE code1 = GET_CODE (orig_op1);
5517 rtx op0 = orig_op0;
5518 rtx op1 = orig_op1;
5520 if (GET_CODE (op0) == SUBREG)
5522 op0 = SUBREG_REG (op0);
5523 code0 = GET_CODE (op0);
5524 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5525 op0 = gen_rtx_REG (word_mode,
5526 (REGNO (op0) +
5527 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5528 GET_MODE (SUBREG_REG (orig_op0)),
5529 SUBREG_BYTE (orig_op0),
5530 GET_MODE (orig_op0))));
5533 if (GET_CODE (op1) == SUBREG)
5535 op1 = SUBREG_REG (op1);
5536 code1 = GET_CODE (op1);
5537 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5538 /* ??? Why is this given op1's mode and above for
5539 ??? op0 SUBREGs we use word_mode? */
5540 op1 = gen_rtx_REG (GET_MODE (op1),
5541 (REGNO (op1) +
5542 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5543 GET_MODE (SUBREG_REG (orig_op1)),
5544 SUBREG_BYTE (orig_op1),
5545 GET_MODE (orig_op1))));
5547      /* A PLUS in the index position may be created only as a result of
5548         register rematerialization for an expression like &localvar*4.  Reload it.
5549 It may be possible to combine the displacement on the outer level,
5550 but it is probably not worthwhile to do so. */
5551 if (context == 1)
5553 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5554 opnum, ADDR_TYPE (type), ind_levels, insn);
5555 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5556 context_reg_class,
5557 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5558 return 1;
5561 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5562 || code0 == ZERO_EXTEND || code1 == MEM)
5564 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5565 &XEXP (x, 0), opnum, type, ind_levels,
5566 insn);
5567 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5568 &XEXP (x, 1), opnum, type, ind_levels,
5569 insn);
5572 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5573 || code1 == ZERO_EXTEND || code0 == MEM)
5575 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5576 &XEXP (x, 0), opnum, type, ind_levels,
5577 insn);
5578 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5579 &XEXP (x, 1), opnum, type, ind_levels,
5580 insn);
5583 else if (code0 == CONST_INT || code0 == CONST
5584 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5585 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5586 &XEXP (x, 1), opnum, type, ind_levels,
5587 insn);
5589 else if (code1 == CONST_INT || code1 == CONST
5590 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5591 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5592 &XEXP (x, 0), opnum, type, ind_levels,
5593 insn);
5595 else if (code0 == REG && code1 == REG)
5597 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5598 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5599 return 0;
5600 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5601 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5602 return 0;
5603 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5604 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5605 &XEXP (x, 1), opnum, type, ind_levels,
5606 insn);
5607 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5608 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5609 &XEXP (x, 0), opnum, type, ind_levels,
5610 insn);
5611 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5612 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5613 &XEXP (x, 0), opnum, type, ind_levels,
5614 insn);
5615 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5616 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5617 &XEXP (x, 1), opnum, type, ind_levels,
5618 insn);
5619 else
5621 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5622 &XEXP (x, 0), opnum, type, ind_levels,
5623 insn);
5624 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5625 &XEXP (x, 1), opnum, type, ind_levels,
5626 insn);
5630 else if (code0 == REG)
5632 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5633 &XEXP (x, 0), opnum, type, ind_levels,
5634 insn);
5635 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5636 &XEXP (x, 1), opnum, type, ind_levels,
5637 insn);
5640 else if (code1 == REG)
5642 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5643 &XEXP (x, 1), opnum, type, ind_levels,
5644 insn);
5645 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5646 &XEXP (x, 0), opnum, type, ind_levels,
5647 insn);
5651 return 0;
5653 case POST_MODIFY:
5654 case PRE_MODIFY:
5656 rtx op0 = XEXP (x, 0);
5657 rtx op1 = XEXP (x, 1);
5658 enum rtx_code index_code;
5659 int regno;
5660 int reloadnum;
5662 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5663 return 0;
5665 /* Currently, we only support {PRE,POST}_MODIFY constructs
5666 where a base register is {inc,dec}remented by the contents
5667 of another register or by a constant value. Thus, these
5668 operands must match. */
5669 gcc_assert (op0 == XEXP (op1, 0));
5671 /* Require index register (or constant). Let's just handle the
5672 register case in the meantime... If the target allows
5673 auto-modify by a constant then we could try replacing a pseudo
5674 register with its equivalent constant where applicable.
5676 We also handle the case where the register was eliminated
5677 resulting in a PLUS subexpression.
5679 If we later decide to reload the whole PRE_MODIFY or
5680 POST_MODIFY, inc_for_reload might clobber the reload register
5681 before reading the index. The index register might therefore
5682 need to live longer than a TYPE reload normally would, so be
5683 conservative and class it as RELOAD_OTHER. */
5684 if ((REG_P (XEXP (op1, 1))
5685 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5686 || GET_CODE (XEXP (op1, 1)) == PLUS)
5687 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5688 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5689 ind_levels, insn);
5691 gcc_assert (REG_P (XEXP (op1, 0)));
5693 regno = REGNO (XEXP (op1, 0));
5694 index_code = GET_CODE (XEXP (op1, 1));
5696 /* A register that is incremented cannot be constant! */
5697 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5698 || reg_equiv_constant (regno) == 0);
5700 /* Handle a register that is equivalent to a memory location
5701 which cannot be addressed directly. */
5702 if (reg_equiv_memory_loc (regno) != 0
5703 && (reg_equiv_address (regno) != 0
5704 || num_not_at_initial_offset))
5706 rtx tem = make_memloc (XEXP (x, 0), regno);
5708 if (reg_equiv_address (regno)
5709 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5711 rtx orig = tem;
5713 /* First reload the memory location's address.
5714 We can't use ADDR_TYPE (type) here, because we need to
5715 write back the value after reading it, hence we actually
5716 need two registers. */
5717 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5718 &XEXP (tem, 0), opnum,
5719 RELOAD_OTHER,
5720 ind_levels, insn);
5722 if (!rtx_equal_p (tem, orig))
5723 push_reg_equiv_alt_mem (regno, tem);
5725 /* Then reload the memory location into a base
5726 register. */
5727 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5728 &XEXP (op1, 0),
5729 base_reg_class (mode, as,
5730 code, index_code),
5731 GET_MODE (x), GET_MODE (x), 0,
5732 0, opnum, RELOAD_OTHER);
5734 update_auto_inc_notes (this_insn, regno, reloadnum);
5735 return 0;
5739 if (reg_renumber[regno] >= 0)
5740 regno = reg_renumber[regno];
5742 /* We require a base register here... */
5743 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5745 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5746 &XEXP (op1, 0), &XEXP (x, 0),
5747 base_reg_class (mode, as,
5748 code, index_code),
5749 GET_MODE (x), GET_MODE (x), 0, 0,
5750 opnum, RELOAD_OTHER);
5752 update_auto_inc_notes (this_insn, regno, reloadnum);
5753 return 0;
5756 return 0;
5758 case POST_INC:
5759 case POST_DEC:
5760 case PRE_INC:
5761 case PRE_DEC:
5762 if (REG_P (XEXP (x, 0)))
5764 int regno = REGNO (XEXP (x, 0));
5765 int value = 0;
5766 rtx x_orig = x;
5768 /* A register that is incremented cannot be constant! */
5769 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5770 || reg_equiv_constant (regno) == 0);
5772 /* Handle a register that is equivalent to a memory location
5773 which cannot be addressed directly. */
5774 if (reg_equiv_memory_loc (regno) != 0
5775 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5777 rtx tem = make_memloc (XEXP (x, 0), regno);
5778 if (reg_equiv_address (regno)
5779 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5781 rtx orig = tem;
5783 /* First reload the memory location's address.
5784 We can't use ADDR_TYPE (type) here, because we need to
5785 write back the value after reading it, hence we actually
5786 need two registers. */
5787 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5788 &XEXP (tem, 0), opnum, type,
5789 ind_levels, insn);
5790 reloaded_inner_of_autoinc = true;
5791 if (!rtx_equal_p (tem, orig))
5792 push_reg_equiv_alt_mem (regno, tem);
5793 /* Put this inside a new increment-expression. */
5794 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5795 /* Proceed to reload that, as if it contained a register. */
5799 /* If we have a hard register that is ok in this incdec context,
5800 don't make a reload. If the register isn't nice enough for
5801         autoincdec, we can reload it.  But if an autoincrement of a
5802         register that we have just verified as acceptable here still
5803         isn't "valid" in the enclosing context, it must be that no autoincrement is "valid".
5804 If that is true and something made an autoincrement anyway,
5805 this must be a special context where one is allowed.
5806 (For example, a "push" instruction.)
5807 We can't improve this address, so leave it alone. */
5809 /* Otherwise, reload the autoincrement into a suitable hard reg
5810 and record how much to increment by. */
5812 if (reg_renumber[regno] >= 0)
5813 regno = reg_renumber[regno];
5814 if (regno >= FIRST_PSEUDO_REGISTER
5815 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5816 index_code))
5818 int reloadnum;
5820 /* If we can output the register afterwards, do so, this
5821 saves the extra update.
5822 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5823 CALL_INSN - and it does not set CC0.
5824 But don't do this if we cannot directly address the
5825 memory location, since this will make it harder to
5826 reuse address reloads, and increases register pressure.
5827 Also don't do this if we can probably update x directly. */
5828 rtx equiv = (MEM_P (XEXP (x, 0))
5829 ? XEXP (x, 0)
5830 : reg_equiv_mem (regno));
5831 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5832 if (insn && NONJUMP_INSN_P (insn)
5833 #if HAVE_cc0
5834 && ! sets_cc0_p (PATTERN (insn))
5835 #endif
5836 && (regno < FIRST_PSEUDO_REGISTER
5837 || (equiv
5838 && memory_operand (equiv, GET_MODE (equiv))
5839 && ! (icode != CODE_FOR_nothing
5840 && insn_operand_matches (icode, 0, equiv)
5841 && insn_operand_matches (icode, 1, equiv))))
5842 /* Using RELOAD_OTHER means we emit this and the reload we
5843 made earlier in the wrong order. */
5844 && !reloaded_inner_of_autoinc)
5846 /* We use the original pseudo for loc, so that
5847 emit_reload_insns() knows which pseudo this
5848 reload refers to and updates the pseudo rtx, not
5849 its equivalent memory location, as well as the
5850 corresponding entry in reg_last_reload_reg. */
5851 loc = &XEXP (x_orig, 0);
5852 x = XEXP (x, 0);
5853 reloadnum
5854 = push_reload (x, x, loc, loc,
5855 context_reg_class,
5856 GET_MODE (x), GET_MODE (x), 0, 0,
5857 opnum, RELOAD_OTHER);
5859 else
5861 reloadnum
5862 = push_reload (x, x, loc, (rtx*) 0,
5863 context_reg_class,
5864 GET_MODE (x), GET_MODE (x), 0, 0,
5865 opnum, type);
5866 rld[reloadnum].inc
5867 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5869 value = 1;
5872 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5873 reloadnum);
5875 return value;
5877 return 0;
5879 case TRUNCATE:
5880 case SIGN_EXTEND:
5881 case ZERO_EXTEND:
5882 /* Look for parts to reload in the inner expression and reload them
5883 too, in addition to this operation. Reloading all inner parts in
5884 addition to this one shouldn't be necessary, but at this point,
5885 we don't know if we can possibly omit any part that *can* be
5886 reloaded. Targets that are better off reloading just either part
5887 (or perhaps even a different part of an outer expression), should
5888 define LEGITIMIZE_RELOAD_ADDRESS. */
5889 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5890 context, code, SCRATCH, &XEXP (x, 0), opnum,
5891 type, ind_levels, insn);
5892 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5893 context_reg_class,
5894 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5895 return 1;
5897 case MEM:
5898 /* This is probably the result of a substitution, by eliminate_regs, of
5899 an equivalent address for a pseudo that was not allocated to a hard
5900 register. Verify that the specified address is valid and reload it
5901 into a register.
5903         Since we know we are going to reload this item, don't decrement
5904         the indirection level.
5906 Note that this is actually conservative: it would be slightly more
5907 efficient to use the value of SPILL_INDIRECT_LEVELS from
5908 reload1.c here. */
5910 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5911 opnum, ADDR_TYPE (type), ind_levels, insn);
5912 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5913 context_reg_class,
5914 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5915 return 1;
5917 case REG:
5919 int regno = REGNO (x);
5921 if (reg_equiv_constant (regno) != 0)
5923 find_reloads_address_part (reg_equiv_constant (regno), loc,
5924 context_reg_class,
5925 GET_MODE (x), opnum, type, ind_levels);
5926 return 1;
5929 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5930 that feeds this insn. */
5931 if (reg_equiv_mem (regno) != 0)
5933 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5934 context_reg_class,
5935 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5936 return 1;
5938 #endif
5940 if (reg_equiv_memory_loc (regno)
5941 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5943 rtx tem = make_memloc (x, regno);
5944 if (reg_equiv_address (regno) != 0
5945 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5947 x = tem;
5948 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5949 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5950 ind_levels, insn);
5951 if (!rtx_equal_p (x, tem))
5952 push_reg_equiv_alt_mem (regno, x);
5956 if (reg_renumber[regno] >= 0)
5957 regno = reg_renumber[regno];
5959 if (regno >= FIRST_PSEUDO_REGISTER
5960 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5961 index_code))
5963 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5964 context_reg_class,
5965 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5966 return 1;
5969 /* If a register appearing in an address is the subject of a CLOBBER
5970 in this insn, reload it into some other register to be safe.
5971 The CLOBBER is supposed to make the register unavailable
5972 from before this insn to after it. */
5973 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5975 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5976 context_reg_class,
5977 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5978 return 1;
5981 return 0;
5983 case SUBREG:
5984 if (REG_P (SUBREG_REG (x)))
5986 /* If this is a SUBREG of a hard register and the resulting register
5987 is of the wrong class, reload the whole SUBREG. This avoids
5988 needless copies if SUBREG_REG is multi-word. */
5989 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5991 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5993 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5994 index_code))
5996 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5997 context_reg_class,
5998 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5999 return 1;
6002 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6003 is larger than the class size, then reload the whole SUBREG. */
6004 else
6006 enum reg_class rclass = context_reg_class;
6007 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6008 > reg_class_size[(int) rclass])
6010 /* If the inner register will be replaced by a memory
6011 reference, we can do this only if we can replace the
6012 whole subreg by a (narrower) memory reference. If
6013 this is not possible, fall through and reload just
6014 the inner register (including address reloads). */
6015 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6017 rtx tem = find_reloads_subreg_address (x, opnum,
6018 ADDR_TYPE (type),
6019 ind_levels, insn,
6020 NULL);
6021 if (tem)
6023 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6024 GET_MODE (tem), VOIDmode, 0, 0,
6025 opnum, type);
6026 return 1;
6029 else
6031 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6032 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6033 return 1;
6038 break;
6040 default:
6041 break;
6045 const char *fmt = GET_RTX_FORMAT (code);
6046 int i;
6048 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6050 if (fmt[i] == 'e')
6051 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6052 we get here. */
6053 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6054 code, SCRATCH, &XEXP (x, i),
6055 opnum, type, ind_levels, insn);
6059 #undef REG_OK_FOR_CONTEXT
6060 return 0;
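/* Editorial examples of the PLUS classification done above; register
   numbers are hypothetical.  For

     (plus (mult (reg 101) (const_int 4)) (reg 102))

   code0 is MULT, so the MULT operand is processed with CONTEXT == 1
   (index) and (reg 102) with CONTEXT == 0 (base).  For a plain

     (plus (reg 101) (reg 102))

   the code first checks whether one register already qualifies as an
   index and the other as a base before pushing any reload at all.  */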
6063 /* X, which is found at *LOC, is a part of an address that needs to be
6064 reloaded into a register of class RCLASS. If X is a constant, or if
6065 X is a PLUS that contains a constant, check that the constant is a
6066 legitimate operand and that we are supposed to be able to load
6067 it into the register.
6069 If not, force the constant into memory and reload the MEM instead.
6071 MODE is the mode to use, in case X is an integer constant.
6073 OPNUM and TYPE describe the purpose of any reloads made.
6075 IND_LEVELS says how many levels of indirect addressing this machine
6076 supports. */
6078 static void
6079 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6080 machine_mode mode, int opnum,
6081 enum reload_type type, int ind_levels)
6083 if (CONSTANT_P (x)
6084 && (!targetm.legitimate_constant_p (mode, x)
6085 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6087 x = force_const_mem (mode, x);
6088 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6089 opnum, type, ind_levels, 0);
6092 else if (GET_CODE (x) == PLUS
6093 && CONSTANT_P (XEXP (x, 1))
6094 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6095 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6096 == NO_REGS))
6098 rtx tem;
6100 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6101 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6102 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6103 opnum, type, ind_levels, 0);
6106 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6107 mode, VOIDmode, 0, 0, opnum, type);
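/* Editorial sketch; the symbol and the target's behavior are
   hypothetical.  If the address part X is (symbol_ref "foo") but the
   target rejects it as a legitimate constant (or refuses to load it
   into RCLASS), the code above rewrites it as a constant-pool reference

     (mem:SI (symbol_ref [pool entry for "foo"]))

   reloads that MEM's own address if necessary, and finally pushes a
   reload of the MEM, rather than of the bare constant, into a register
   of class RCLASS.  */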
6110 /* X, a subreg of a pseudo, is a part of an address that needs to be
6111    reloaded, and the pseudo is equivalent to a memory location.
6113 Attempt to replace the whole subreg by a (possibly narrower or wider)
6114 memory reference. If this is possible, return this new memory
6115 reference, and push all required address reloads. Otherwise,
6116 return NULL.
6118 OPNUM and TYPE identify the purpose of the reload.
6120 IND_LEVELS says how many levels of indirect addressing are
6121 supported at this point in the address.
6123 INSN, if nonzero, is the insn in which we do the reload. It is used
6124 to determine where to put USEs for pseudos that we have to replace with
6125 stack slots. */
6127 static rtx
6128 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6129 int ind_levels, rtx_insn *insn,
6130 int *address_reloaded)
6132 machine_mode outer_mode = GET_MODE (x);
6133 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6134 int regno = REGNO (SUBREG_REG (x));
6135 int reloaded = 0;
6136 rtx tem, orig;
6137 int offset;
6139 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6141 /* We cannot replace the subreg with a modified memory reference if:
6143 - we have a paradoxical subreg that implicitly acts as a zero or
6144 sign extension operation due to LOAD_EXTEND_OP;
6146 - we have a subreg that is implicitly supposed to act on the full
6147 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6149 - the address of the equivalent memory location is mode-dependent; or
6151 - we have a paradoxical subreg and the resulting memory is not
6152 sufficiently aligned to allow access in the wider mode.
6154 In addition, we choose not to perform the replacement for *any*
6155 paradoxical subreg, even if it were possible in principle. This
6156 is to avoid generating wider memory references than necessary.
6158 This corresponds to how previous versions of reload used to handle
6159 paradoxical subregs where no address reload was required. */
6161 if (paradoxical_subreg_p (x))
6162 return NULL;
6164 if (WORD_REGISTER_OPERATIONS
6165 && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6166 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6167 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6168 return NULL;
6170 /* Since we don't attempt to handle paradoxical subregs, we can just
6171 call into simplify_subreg, which will handle all remaining checks
6172 for us. */
6173 orig = make_memloc (SUBREG_REG (x), regno);
6174 offset = SUBREG_BYTE (x);
6175 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6176 if (!tem || !MEM_P (tem))
6177 return NULL;
6179 /* Now push all required address reloads, if any. */
6180 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6181 XEXP (tem, 0), &XEXP (tem, 0),
6182 opnum, type, ind_levels, insn);
6183 /* ??? Do we need to handle nonzero offsets somehow? */
6184 if (!offset && !rtx_equal_p (tem, orig))
6185 push_reg_equiv_alt_mem (regno, tem);
6187 /* For some processors an address may be valid in the original mode but
6188 not in a smaller mode. For example, ARM accepts a scaled index register
6189 in SImode but not in HImode. Note that this is only a problem if the
6190 address in reg_equiv_mem is already invalid in the new mode; other
6191 cases would be fixed by find_reloads_address as usual.
6193 ??? We attempt to handle such cases here by doing an additional reload
6194 of the full address after the usual processing by find_reloads_address.
6195 Note that this may not work in the general case, but it seems to cover
6196 the cases where this situation currently occurs. A more general fix
6197 might be to reload the *value* instead of the address, but this would
6198 not be expected by the callers of this routine as-is.
6200    If find_reloads_address already completely replaced the address, there
6201 is nothing further to do. */
6202 if (reloaded == 0
6203 && reg_equiv_mem (regno) != 0
6204 && !strict_memory_address_addr_space_p
6205 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6206 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6208 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6209 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6210 MEM, SCRATCH),
6211 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6212 reloaded = 1;
6215 /* If this is not a toplevel operand, find_reloads doesn't see this
6216 substitution. We have to emit a USE of the pseudo so that
6217 delete_output_reload can see it. */
6218 if (replace_reloads && recog_data.operand[opnum] != x)
6219 /* We mark the USE with QImode so that we recognize it as one that
6220 can be safely deleted at the end of reload. */
6221 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6222 QImode);
6224 if (address_reloaded)
6225 *address_reloaded = reloaded;
6227 return tem;
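/* Editorial example; register number, modes and offsets are
   hypothetical.  If (reg:SI 300) got no hard register and is equivalent
   to (mem:SI (plus (reg/f 1 fp) (const_int -8))), then, on a target
   where the checks above allow the replacement,

     (subreg:HI (reg:SI 300) 2)

   is narrowed by simplify_subreg into

     (mem:HI (plus (reg/f 1 fp) (const_int -6)))

   and any reloads that address still needs are pushed as usual.  */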
6230 /* Substitute into the current INSN the registers into which we have reloaded
6231 the things that need reloading. The array `replacements'
6232 contains the locations of all pointers that must be changed
6233 and says what to replace them with.
6235 Return the rtx that X translates into; usually X, but modified. */
6237 void
6238 subst_reloads (rtx_insn *insn)
6240 int i;
6242 for (i = 0; i < n_replacements; i++)
6244 struct replacement *r = &replacements[i];
6245 rtx reloadreg = rld[r->what].reg_rtx;
6246 if (reloadreg)
6248 #ifdef DEBUG_RELOAD
6249 /* This checking takes a very long time on some platforms
6250 causing the gcc.c-torture/compile/limits-fnargs.c test
6251 to time out during testing. See PR 31850.
6253 Internal consistency test. Check that we don't modify
6254 anything in the equivalence arrays. Whenever something from
6255 those arrays needs to be reloaded, it must be unshared before
6256 being substituted into; the equivalence must not be modified.
6257 Otherwise, if the equivalence is used after that, it will
6258 have been modified, and the thing substituted (probably a
6259 register) is likely overwritten and not a usable equivalence. */
6260 int check_regno;
6262 for (check_regno = 0; check_regno < max_regno; check_regno++)
6264 #define CHECK_MODF(ARRAY) \
6265 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6266 || !loc_mentioned_in_p (r->where, \
6267 (*reg_equivs)[check_regno].ARRAY))
6269 CHECK_MODF (constant);
6270 CHECK_MODF (memory_loc);
6271 CHECK_MODF (address);
6272 CHECK_MODF (mem);
6273 #undef CHECK_MODF
6275 #endif /* DEBUG_RELOAD */
6277 /* If we're replacing a LABEL_REF with a register, there must
6278 already be an indication (to e.g. flow) which label this
6279 register refers to. */
6280 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6281 || !JUMP_P (insn)
6282 || find_reg_note (insn,
6283 REG_LABEL_OPERAND,
6284 XEXP (*r->where, 0))
6285 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6287 /* Encapsulate RELOADREG so its machine mode matches what
6288 used to be there. Note that gen_lowpart_common will
6289 do the wrong thing if RELOADREG is multi-word. RELOADREG
6290 will always be a REG here. */
6291 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6292 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6294 *r->where = reloadreg;
6296 /* If reload got no reg and isn't optional, something's wrong. */
6297 else
6298 gcc_assert (rld[r->what].optional);
6302 /* Make a copy of any replacements being done into X and move those
6303 copies to locations in Y, a copy of X. */
6305 void
6306 copy_replacements (rtx x, rtx y)
6308 copy_replacements_1 (&x, &y, n_replacements);
6311 static void
6312 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6314 int i, j;
6315 rtx x, y;
6316 struct replacement *r;
6317 enum rtx_code code;
6318 const char *fmt;
6320 for (j = 0; j < orig_replacements; j++)
6321 if (replacements[j].where == px)
6323 r = &replacements[n_replacements++];
6324 r->where = py;
6325 r->what = replacements[j].what;
6326 r->mode = replacements[j].mode;
6329 x = *px;
6330 y = *py;
6331 code = GET_CODE (x);
6332 fmt = GET_RTX_FORMAT (code);
6334 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6336 if (fmt[i] == 'e')
6337 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6338 else if (fmt[i] == 'E')
6339 for (j = XVECLEN (x, i); --j >= 0; )
6340 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6341 orig_replacements);
6345 /* Change any replacements being done to *X to be done to *Y. */
6347 void
6348 move_replacements (rtx *x, rtx *y)
6350 int i;
6352 for (i = 0; i < n_replacements; i++)
6353 if (replacements[i].where == x)
6354 replacements[i].where = y;
6357 /* If LOC was scheduled to be replaced by something, return the replacement.
6358 Otherwise, return *LOC. */
6361 find_replacement (rtx *loc)
6363 struct replacement *r;
6365 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6367 rtx reloadreg = rld[r->what].reg_rtx;
6369 if (reloadreg && r->where == loc)
6371 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6372 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6374 return reloadreg;
6376 else if (reloadreg && GET_CODE (*loc) == SUBREG
6377 && r->where == &SUBREG_REG (*loc))
6379 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6380 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6382 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6383 GET_MODE (SUBREG_REG (*loc)),
6384 SUBREG_BYTE (*loc));
6388 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6389 what's inside and make a new rtl if so. */
6390 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6391 || GET_CODE (*loc) == MULT)
6393 rtx x = find_replacement (&XEXP (*loc, 0));
6394 rtx y = find_replacement (&XEXP (*loc, 1));
6396 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6397 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6400 return *loc;
6403 /* Return nonzero if register in range [REGNO, ENDREGNO)
6404 appears either explicitly or implicitly in X
6405 other than being stored into (except for earlyclobber operands).
6407 References contained within the substructure at LOC do not count.
6408 LOC may be zero, meaning don't ignore anything.
6410 This is similar to refers_to_regno_p in rtlanal.c except that we
6411 look at equivalences for pseudos that didn't get hard registers. */
6413 static int
6414 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6415 rtx x, rtx *loc)
6417 int i;
6418 unsigned int r;
6419 RTX_CODE code;
6420 const char *fmt;
6422 if (x == 0)
6423 return 0;
6425 repeat:
6426 code = GET_CODE (x);
6428 switch (code)
6430 case REG:
6431 r = REGNO (x);
6433 /* If this is a pseudo, a hard register must not have been allocated.
6434 X must therefore either be a constant or be in memory. */
6435 if (r >= FIRST_PSEUDO_REGISTER)
6437 if (reg_equiv_memory_loc (r))
6438 return refers_to_regno_for_reload_p (regno, endregno,
6439 reg_equiv_memory_loc (r),
6440 (rtx*) 0);
6442 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6443 return 0;
6446 return (endregno > r
6447 && regno < r + (r < FIRST_PSEUDO_REGISTER
6448 ? hard_regno_nregs[r][GET_MODE (x)]
6449 : 1));
6451 case SUBREG:
6452 /* If this is a SUBREG of a hard reg, we can see exactly which
6453 registers are being modified. Otherwise, handle normally. */
6454 if (REG_P (SUBREG_REG (x))
6455 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6457 unsigned int inner_regno = subreg_regno (x);
6458 unsigned int inner_endregno
6459 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6460 ? subreg_nregs (x) : 1);
6462 return endregno > inner_regno && regno < inner_endregno;
6464 break;
6466 case CLOBBER:
6467 case SET:
6468 if (&SET_DEST (x) != loc
6469 /* Note setting a SUBREG counts as referring to the REG it is in for
6470 a pseudo but not for hard registers since we can
6471 treat each word individually. */
6472 && ((GET_CODE (SET_DEST (x)) == SUBREG
6473 && loc != &SUBREG_REG (SET_DEST (x))
6474 && REG_P (SUBREG_REG (SET_DEST (x)))
6475 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6476 && refers_to_regno_for_reload_p (regno, endregno,
6477 SUBREG_REG (SET_DEST (x)),
6478 loc))
6479 /* If the output is an earlyclobber operand, this is
6480 a conflict. */
6481 || ((!REG_P (SET_DEST (x))
6482 || earlyclobber_operand_p (SET_DEST (x)))
6483 && refers_to_regno_for_reload_p (regno, endregno,
6484 SET_DEST (x), loc))))
6485 return 1;
6487 if (code == CLOBBER || loc == &SET_SRC (x))
6488 return 0;
6489 x = SET_SRC (x);
6490 goto repeat;
6492 default:
6493 break;
6496 /* X does not match, so try its subexpressions. */
6498 fmt = GET_RTX_FORMAT (code);
6499 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6501 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6503 if (i == 0)
6505 x = XEXP (x, 0);
6506 goto repeat;
6508 else
6509 if (refers_to_regno_for_reload_p (regno, endregno,
6510 XEXP (x, i), loc))
6511 return 1;
6513 else if (fmt[i] == 'E')
6515 int j;
6516 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6517 if (loc != &XVECEXP (x, i, j)
6518 && refers_to_regno_for_reload_p (regno, endregno,
6519 XVECEXP (x, i, j), loc))
6520 return 1;
6523 return 0;
6526 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6527 we check if any register number in X conflicts with the relevant register
6528 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6529 contains a MEM (we don't bother checking for memory addresses that can't
6530    conflict because we expect this to be a rare case).
6532 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6533 that we look at equivalences for pseudos that didn't get hard registers. */
6536 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6538 int regno, endregno;
6540 /* Overly conservative. */
6541 if (GET_CODE (x) == STRICT_LOW_PART
6542 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6543 x = XEXP (x, 0);
6545 /* If either argument is a constant, then modifying X can not affect IN. */
6546 if (CONSTANT_P (x) || CONSTANT_P (in))
6547 return 0;
6548 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6549 return refers_to_mem_for_reload_p (in);
6550 else if (GET_CODE (x) == SUBREG)
6552 regno = REGNO (SUBREG_REG (x));
6553 if (regno < FIRST_PSEUDO_REGISTER)
6554 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6555 GET_MODE (SUBREG_REG (x)),
6556 SUBREG_BYTE (x),
6557 GET_MODE (x));
6558 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6559 ? subreg_nregs (x) : 1);
6561 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6563 else if (REG_P (x))
6565 regno = REGNO (x);
6567 /* If this is a pseudo, it must not have been assigned a hard register.
6568 Therefore, it must either be in memory or be a constant. */
6570 if (regno >= FIRST_PSEUDO_REGISTER)
6572 if (reg_equiv_memory_loc (regno))
6573 return refers_to_mem_for_reload_p (in);
6574 gcc_assert (reg_equiv_constant (regno));
6575 return 0;
6578 endregno = END_REGNO (x);
6580 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6582 else if (MEM_P (x))
6583 return refers_to_mem_for_reload_p (in);
6584 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6585 || GET_CODE (x) == CC0)
6586 return reg_mentioned_p (x, in);
6587 else
6589 gcc_assert (GET_CODE (x) == PLUS);
6591 /* We actually want to know if X is mentioned somewhere inside IN.
6592 We must not say that (plus (sp) (const_int 124)) is in
6593 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6594 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6595 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6596 while (MEM_P (in))
6597 in = XEXP (in, 0);
6598 if (REG_P (in))
6599 return 0;
6600 else if (GET_CODE (in) == PLUS)
6601 return (rtx_equal_p (x, in)
6602 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6603 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6604 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6605 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6608 gcc_unreachable ();
6611 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6612 registers. */
6614 static int
6615 refers_to_mem_for_reload_p (rtx x)
6617 const char *fmt;
6618 int i;
6620 if (MEM_P (x))
6621 return 1;
6623 if (REG_P (x))
6624 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6625 && reg_equiv_memory_loc (REGNO (x)));
6627 fmt = GET_RTX_FORMAT (GET_CODE (x));
6628 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6629 if (fmt[i] == 'e'
6630 && (MEM_P (XEXP (x, i))
6631 || refers_to_mem_for_reload_p (XEXP (x, i))))
6632 return 1;
6634 return 0;
6637 /* Check the insns before INSN to see if there is a suitable register
6638 containing the same value as GOAL.
6639 If OTHER is -1, look for a register in class RCLASS.
6640 Otherwise, just see if register number OTHER shares GOAL's value.
6642 Return an rtx for the register found, or zero if none is found.
6644 If RELOAD_REG_P is (short *)1,
6645 we reject any hard reg that appears in reload_reg_rtx
6646 because such a hard reg is also needed coming into this insn.
6648 If RELOAD_REG_P is any other nonzero value,
6649 it is a vector indexed by hard reg number
6650 and we reject any hard reg whose element in the vector is nonnegative
6651 as well as any that appears in reload_reg_rtx.
6653 If GOAL is zero, then GOALREG is a register number; we look
6654 for an equivalent for that register.
6656 MODE is the machine mode of the value we want an equivalence for.
6657 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6659 This function is used by jump.c as well as in the reload pass.
6661 If GOAL is the sum of the stack pointer and a constant, we treat it
6662 as if it were a constant except that sp is required to be unchanging. */
6665 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6666 short *reload_reg_p, int goalreg, machine_mode mode)
6668 rtx_insn *p = insn;
6669 rtx goaltry, valtry, value;
6670 rtx_insn *where;
6671 rtx pat;
6672 int regno = -1;
6673 int valueno;
6674 int goal_mem = 0;
6675 int goal_const = 0;
6676 int goal_mem_addr_varies = 0;
6677 int need_stable_sp = 0;
6678 int nregs;
6679 int valuenregs;
6680 int num = 0;
6682 if (goal == 0)
6683 regno = goalreg;
6684 else if (REG_P (goal))
6685 regno = REGNO (goal);
6686 else if (MEM_P (goal))
6688 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6689 if (MEM_VOLATILE_P (goal))
6690 return 0;
6691 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6692 return 0;
6693 /* An address with side effects must be reexecuted. */
6694 switch (code)
6696 case POST_INC:
6697 case PRE_INC:
6698 case POST_DEC:
6699 case PRE_DEC:
6700 case POST_MODIFY:
6701 case PRE_MODIFY:
6702 return 0;
6703 default:
6704 break;
6706 goal_mem = 1;
6708 else if (CONSTANT_P (goal))
6709 goal_const = 1;
6710 else if (GET_CODE (goal) == PLUS
6711 && XEXP (goal, 0) == stack_pointer_rtx
6712 && CONSTANT_P (XEXP (goal, 1)))
6713 goal_const = need_stable_sp = 1;
6714 else if (GET_CODE (goal) == PLUS
6715 && XEXP (goal, 0) == frame_pointer_rtx
6716 && CONSTANT_P (XEXP (goal, 1)))
6717 goal_const = 1;
6718 else
6719 return 0;
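/* At this point GOAL is known to be a register (REGNO >= 0), a memory
   reference (GOAL_MEM), or effectively constant (GOAL_CONST); the
   sp + constant form additionally requires an unchanging stack pointer
   (NEED_STABLE_SP). */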
6721 num = 0;
6722 /* Scan insns back from INSN, looking for one that copies
6723 a value into or out of GOAL.
6724 Stop and give up if we reach a label. */
6726 while (1)
6728 p = PREV_INSN (p);
6729 if (p && DEBUG_INSN_P (p))
6730 continue;
6731 num++;
6732 if (p == 0 || LABEL_P (p)
6733 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6734 return 0;
6736 /* Don't reuse register contents from before a setjmp-type
6737 function call; on the second return (from the longjmp) it
6738 might have been clobbered by a later reuse. It doesn't
6739 seem worthwhile to check whether it really is reused, even
6740 if that information were readily available; just don't
6741 reuse it across the setjmp call. */
6742 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6743 return 0;
6745 if (NONJUMP_INSN_P (p)
6746 /* If we don't want spill regs ... */
6747 && (! (reload_reg_p != 0
6748 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6749 /* ... then ignore insns introduced by reload; they aren't
6750 useful and can cause results in reload_as_needed to be
6751 different from what they were when calculating the need for
6752 spills. If we notice an input-reload insn here, we will
6753 reject it below, but it might hide a usable equivalent.
6754 That makes bad code. It may even fail: perhaps no reg was
6755 spilled for this insn because it was assumed we would find
6756 that equivalent. */
6757 || INSN_UID (p) < reload_first_uid))
6759 rtx tem;
6760 pat = single_set (p);
6762 /* First check for something that sets some reg equal to GOAL. */
6763 if (pat != 0
6764 && ((regno >= 0
6765 && true_regnum (SET_SRC (pat)) == regno
6766 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6767 ||
6768 (regno >= 0
6769 && true_regnum (SET_DEST (pat)) == regno
6770 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6771 ||
6772 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6773 /* When looking for stack pointer + const,
6774 make sure we don't use a stack adjust. */
6775 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6776 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6777 || (goal_mem
6778 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6779 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6780 || (goal_mem
6781 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6782 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6783 /* If we are looking for a constant,
6784 and something equivalent to that constant was copied
6785 into a reg, we can use that reg. */
6786 || (goal_const && REG_NOTES (p) != 0
6787 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6788 && ((rtx_equal_p (XEXP (tem, 0), goal)
6789 && (valueno
6790 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6791 || (REG_P (SET_DEST (pat))
6792 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6793 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6794 && CONST_INT_P (goal)
6795 && 0 != (goaltry
6796 = operand_subword (XEXP (tem, 0), 0, 0,
6797 VOIDmode))
6798 && rtx_equal_p (goal, goaltry)
6799 && (valtry
6800 = operand_subword (SET_DEST (pat), 0, 0,
6801 VOIDmode))
6802 && (valueno = true_regnum (valtry)) >= 0)))
6803 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6804 NULL_RTX))
6805 && REG_P (SET_DEST (pat))
6806 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6807 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6808 && CONST_INT_P (goal)
6809 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6810 VOIDmode))
6811 && rtx_equal_p (goal, goaltry)
6812 && (valtry
6813 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6814 && (valueno = true_regnum (valtry)) >= 0)))
6816 if (other >= 0)
6817 {
6818 if (valueno != other)
6819 continue;
6820 }
6821 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6822 continue;
6823 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6824 mode, valueno))
6825 continue;
6826 value = valtry;
6827 where = p;
6828 break;
6833 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6834 (or copying VALUE into GOAL, if GOAL is also a register).
6835 Now verify that VALUE is really valid. */
6837 /* VALUENO is the register number of VALUE; a hard register. */
6839 /* Don't try to re-use something that is killed in this insn. We want
6840 to be able to trust REG_UNUSED notes. */
6841 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6842 return 0;
6844 /* If we propose to get the value from the stack pointer or if GOAL is
6845 a MEM based on the stack pointer, we need a stable SP. */
6846 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6847 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6848 goal)))
6849 need_stable_sp = 1;
6851 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6852 if (GET_MODE (value) != mode)
6853 return 0;
6855 /* Reject VALUE if it was loaded from GOAL
6856 and is also a register that appears in the address of GOAL. */
6858 if (goal_mem && value == SET_DEST (single_set (where))
6859 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6860 goal, (rtx*) 0))
6861 return 0;
6863 /* Reject registers that overlap GOAL. */
6865 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6866 nregs = hard_regno_nregs[regno][mode];
6867 else
6868 nregs = 1;
6869 valuenregs = hard_regno_nregs[valueno][mode];
6871 if (!goal_mem && !goal_const
6872 && regno + nregs > valueno && regno < valueno + valuenregs)
6873 return 0;
6875 /* Reject VALUE if it is one of the regs reserved for reloads.
6876 Reload1 knows how to reuse them anyway, and it would get
6877 confused if we allocated one without its knowledge.
6878 (Now that insns introduced by reload are ignored above,
6879 this case shouldn't happen, but I'm not positive.) */
6881 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6883 int i;
6884 for (i = 0; i < valuenregs; ++i)
6885 if (reload_reg_p[valueno + i] >= 0)
6886 return 0;
6889 /* Reject VALUE if it is a register being used for an input reload
6890 even if it is not one of those reserved. */
6892 if (reload_reg_p != 0)
6894 int i;
6895 for (i = 0; i < n_reloads; i++)
6896 if (rld[i].reg_rtx != 0 && rld[i].in)
6898 int regno1 = REGNO (rld[i].reg_rtx);
6899 int nregs1 = hard_regno_nregs[regno1]
6900 [GET_MODE (rld[i].reg_rtx)];
6901 if (regno1 < valueno + valuenregs
6902 && regno1 + nregs1 > valueno)
6903 return 0;
6907 if (goal_mem)
6908 /* We must treat the frame pointer as varying here, since it can
6909 vary, as in a nonlocal goto generated by expand_goto. */
6910 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6912 /* Now verify that the values of GOAL and VALUE remain unaltered
6913 until INSN is reached. */
6915 p = insn;
6916 while (1)
6918 p = PREV_INSN (p);
6919 if (p == where)
6920 return value;
6922 /* Don't trust the equivalence past a function call if either GOAL
6923 or VALUE is in a call-clobbered register, or if GOAL refers to memory. */
6924 if (CALL_P (p))
6926 int i;
6928 if (goal_mem || need_stable_sp)
6929 return 0;
6931 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6932 for (i = 0; i < nregs; ++i)
6933 if (call_used_regs[regno + i]
6934 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6935 return 0;
6937 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6938 for (i = 0; i < valuenregs; ++i)
6939 if (call_used_regs[valueno + i]
6940 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6941 return 0;
6944 if (INSN_P (p))
6946 pat = PATTERN (p);
6948 /* Watch out for unspec_volatile, and volatile asms. */
6949 if (volatile_insn_p (pat))
6950 return 0;
6952 /* If this insn P stores in either GOAL or VALUE, return 0.
6953 If GOAL is a memory ref and this insn writes memory, return 0.
6954 If GOAL is a memory ref and its address is not constant,
6955 and this insn P changes a register used in GOAL, return 0. */
6957 if (GET_CODE (pat) == COND_EXEC)
6958 pat = COND_EXEC_CODE (pat);
6959 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6961 rtx dest = SET_DEST (pat);
6962 while (GET_CODE (dest) == SUBREG
6963 || GET_CODE (dest) == ZERO_EXTRACT
6964 || GET_CODE (dest) == STRICT_LOW_PART)
6965 dest = XEXP (dest, 0);
6966 if (REG_P (dest))
6968 int xregno = REGNO (dest);
6969 int xnregs;
6970 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6971 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6972 else
6973 xnregs = 1;
6974 if (xregno < regno + nregs && xregno + xnregs > regno)
6975 return 0;
6976 if (xregno < valueno + valuenregs
6977 && xregno + xnregs > valueno)
6978 return 0;
6979 if (goal_mem_addr_varies
6980 && reg_overlap_mentioned_for_reload_p (dest, goal))
6981 return 0;
6982 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6983 return 0;
6985 else if (goal_mem && MEM_P (dest)
6986 && ! push_operand (dest, GET_MODE (dest)))
6987 return 0;
6988 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6989 && reg_equiv_memory_loc (regno) != 0)
6990 return 0;
6991 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6992 return 0;
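/* A PARALLEL can contain several SETs and CLOBBERs; apply the same
   checks to each of its elements. */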
6994 else if (GET_CODE (pat) == PARALLEL)
6996 int i;
6997 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6999 rtx v1 = XVECEXP (pat, 0, i);
7000 if (GET_CODE (v1) == COND_EXEC)
7001 v1 = COND_EXEC_CODE (v1);
7002 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7004 rtx dest = SET_DEST (v1);
7005 while (GET_CODE (dest) == SUBREG
7006 || GET_CODE (dest) == ZERO_EXTRACT
7007 || GET_CODE (dest) == STRICT_LOW_PART)
7008 dest = XEXP (dest, 0);
7009 if (REG_P (dest))
7011 int xregno = REGNO (dest);
7012 int xnregs;
7013 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7014 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7015 else
7016 xnregs = 1;
7017 if (xregno < regno + nregs
7018 && xregno + xnregs > regno)
7019 return 0;
7020 if (xregno < valueno + valuenregs
7021 && xregno + xnregs > valueno)
7022 return 0;
7023 if (goal_mem_addr_varies
7024 && reg_overlap_mentioned_for_reload_p (dest,
7025 goal))
7026 return 0;
7027 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7028 return 0;
7030 else if (goal_mem && MEM_P (dest)
7031 && ! push_operand (dest, GET_MODE (dest)))
7032 return 0;
7033 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7034 && reg_equiv_memory_loc (regno) != 0)
7035 return 0;
7036 else if (need_stable_sp
7037 && push_operand (dest, GET_MODE (dest)))
7038 return 0;
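/* Clobbers recorded in CALL_INSN_FUNCTION_USAGE can also overwrite
   GOAL or VALUE, so check those as well. */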
7043 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7045 rtx link;
7047 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7048 link = XEXP (link, 1))
7050 pat = XEXP (link, 0);
7051 if (GET_CODE (pat) == CLOBBER)
7053 rtx dest = SET_DEST (pat);
7055 if (REG_P (dest))
7057 int xregno = REGNO (dest);
7058 int xnregs
7059 = hard_regno_nregs[xregno][GET_MODE (dest)];
7061 if (xregno < regno + nregs
7062 && xregno + xnregs > regno)
7063 return 0;
7064 else if (xregno < valueno + valuenregs
7065 && xregno + xnregs > valueno)
7066 return 0;
7067 else if (goal_mem_addr_varies
7068 && reg_overlap_mentioned_for_reload_p (dest,
7069 goal))
7070 return 0;
7073 else if (goal_mem && MEM_P (dest)
7074 && ! push_operand (dest, GET_MODE (dest)))
7075 return 0;
7076 else if (need_stable_sp
7077 && push_operand (dest, GET_MODE (dest)))
7078 return 0;
7083 #if AUTO_INC_DEC
7084 /* If this insn auto-increments or auto-decrements
7085 either regno or valueno, return 0 now.
7086 If GOAL is a memory ref and its address is not constant,
7087 and this insn P increments a register used in GOAL, return 0. */
7089 rtx link;
7091 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7092 if (REG_NOTE_KIND (link) == REG_INC
7093 && REG_P (XEXP (link, 0)))
7095 int incno = REGNO (XEXP (link, 0));
7096 if (incno < regno + nregs && incno >= regno)
7097 return 0;
7098 if (incno < valueno + valuenregs && incno >= valueno)
7099 return 0;
7100 if (goal_mem_addr_varies
7101 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7102 goal))
7103 return 0;
7106 #endif
7111 /* Find a place where INCED appears in an increment or decrement operator
7112 within X, and return the amount INCED is incremented or decremented by.
7113 The value is always positive. */
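/* For instance (a rough sketch), if INCED is (reg R) and X contains
   (mem:SI (post_inc (reg R))), the amount is GET_MODE_SIZE (SImode),
   typically 4; for an address such as
   (pre_modify (reg R) (plus (reg R) (const_int -8))) it is 8. */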
7115 static int
7116 find_inc_amount (rtx x, rtx inced)
7118 enum rtx_code code = GET_CODE (x);
7119 const char *fmt;
7120 int i;
7122 if (code == MEM)
7124 rtx addr = XEXP (x, 0);
7125 if ((GET_CODE (addr) == PRE_DEC
7126 || GET_CODE (addr) == POST_DEC
7127 || GET_CODE (addr) == PRE_INC
7128 || GET_CODE (addr) == POST_INC)
7129 && XEXP (addr, 0) == inced)
7130 return GET_MODE_SIZE (GET_MODE (x));
7131 else if ((GET_CODE (addr) == PRE_MODIFY
7132 || GET_CODE (addr) == POST_MODIFY)
7133 && GET_CODE (XEXP (addr, 1)) == PLUS
7134 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7135 && XEXP (addr, 0) == inced
7136 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7138 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7139 return i < 0 ? -i : i;
7143 fmt = GET_RTX_FORMAT (code);
7144 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7146 if (fmt[i] == 'e')
7148 int tem = find_inc_amount (XEXP (x, i), inced);
7149 if (tem != 0)
7150 return tem;
7152 if (fmt[i] == 'E')
7154 int j;
7155 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7157 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7158 if (tem != 0)
7159 return tem;
7164 return 0;
7167 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7168 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7170 static int
7171 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7172 rtx insn)
7174 rtx link;
7176 if (!AUTO_INC_DEC)
7177 return 0;
7179 gcc_assert (insn);
7181 if (! INSN_P (insn))
7182 return 0;
7184 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7185 if (REG_NOTE_KIND (link) == REG_INC)
7187 unsigned int test = (int) REGNO (XEXP (link, 0));
7188 if (test >= regno && test < endregno)
7189 return 1;
7191 return 0;
7194 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7195 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7196 REG_INC. REGNO must refer to a hard register. */
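/* For example (an illustrative sketch), with INSN being
   (set (reg:SI 3) (const_int 0)):

     regno_clobbered_p (3, insn, SImode, 0)  => 0  (only CLOBBERs count)
     regno_clobbered_p (3, insn, SImode, 1)  => 1  (SETs count as well)

   and with SETS == 2 a REG_INC note for register 3 would also yield 1. */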
7198 int
7199 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7200 int sets)
7202 unsigned int nregs, endregno;
7204 /* regno must be a hard register. */
7205 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7207 nregs = hard_regno_nregs[regno][mode];
7208 endregno = regno + nregs;
7210 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7211 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7212 && REG_P (XEXP (PATTERN (insn), 0)))
7214 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7216 return test >= regno && test < endregno;
7219 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7220 return 1;
7222 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7224 int i = XVECLEN (PATTERN (insn), 0) - 1;
7226 for (; i >= 0; i--)
7228 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7229 if ((GET_CODE (elt) == CLOBBER
7230 || (sets == 1 && GET_CODE (elt) == SET))
7231 && REG_P (XEXP (elt, 0)))
7233 unsigned int test = REGNO (XEXP (elt, 0));
7235 if (test >= regno && test < endregno)
7236 return 1;
7238 if (sets == 2
7239 && reg_inc_found_and_valid_p (regno, endregno, elt))
7240 return 1;
7244 return 0;
7247 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
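/* For example (assuming word-sized hard registers), the SImode low part
   of (reg:DI 3) is (reg:SI 3) on a little-endian register layout, but
   (reg:SI 4) when REG_WORDS_BIG_ENDIAN, since the low-order word then
   lives in the higher-numbered register. */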
7248 rtx
7249 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7251 int regno;
7253 if (GET_MODE (reloadreg) == mode)
7254 return reloadreg;
7256 regno = REGNO (reloadreg);
7258 if (REG_WORDS_BIG_ENDIAN)
7259 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7260 - (int) hard_regno_nregs[regno][mode];
7262 return gen_rtx_REG (mode, regno);
7265 static const char *const reload_when_needed_name[] =
7267 "RELOAD_FOR_INPUT",
7268 "RELOAD_FOR_OUTPUT",
7269 "RELOAD_FOR_INSN",
7270 "RELOAD_FOR_INPUT_ADDRESS",
7271 "RELOAD_FOR_INPADDR_ADDRESS",
7272 "RELOAD_FOR_OUTPUT_ADDRESS",
7273 "RELOAD_FOR_OUTADDR_ADDRESS",
7274 "RELOAD_FOR_OPERAND_ADDRESS",
7275 "RELOAD_FOR_OPADDR_ADDR",
7276 "RELOAD_OTHER",
7277 "RELOAD_FOR_OTHER_ADDRESS"
7280 /* These functions are used to print the variables set by 'find_reloads'. */
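/* They are DEBUG_FUNCTIONs, intended to be called by hand from the
   debugger (e.g. "call debug_reload ()" under gdb) to dump the current
   contents of the rld[] array. */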
7282 DEBUG_FUNCTION void
7283 debug_reload_to_stream (FILE *f)
7285 int r;
7286 const char *prefix;
7288 if (! f)
7289 f = stderr;
7290 for (r = 0; r < n_reloads; r++)
7292 fprintf (f, "Reload %d: ", r);
7294 if (rld[r].in != 0)
7296 fprintf (f, "reload_in (%s) = ",
7297 GET_MODE_NAME (rld[r].inmode));
7298 print_inline_rtx (f, rld[r].in, 24);
7299 fprintf (f, "\n\t");
7302 if (rld[r].out != 0)
7304 fprintf (f, "reload_out (%s) = ",
7305 GET_MODE_NAME (rld[r].outmode));
7306 print_inline_rtx (f, rld[r].out, 24);
7307 fprintf (f, "\n\t");
7310 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7312 fprintf (f, "%s (opnum = %d)",
7313 reload_when_needed_name[(int) rld[r].when_needed],
7314 rld[r].opnum);
7316 if (rld[r].optional)
7317 fprintf (f, ", optional");
7319 if (rld[r].nongroup)
7320 fprintf (f, ", nongroup");
7322 if (rld[r].inc != 0)
7323 fprintf (f, ", inc by %d", rld[r].inc);
7325 if (rld[r].nocombine)
7326 fprintf (f, ", can't combine");
7328 if (rld[r].secondary_p)
7329 fprintf (f, ", secondary_reload_p");
7331 if (rld[r].in_reg != 0)
7333 fprintf (f, "\n\treload_in_reg: ");
7334 print_inline_rtx (f, rld[r].in_reg, 24);
7337 if (rld[r].out_reg != 0)
7339 fprintf (f, "\n\treload_out_reg: ");
7340 print_inline_rtx (f, rld[r].out_reg, 24);
7343 if (rld[r].reg_rtx != 0)
7345 fprintf (f, "\n\treload_reg_rtx: ");
7346 print_inline_rtx (f, rld[r].reg_rtx, 24);
7349 prefix = "\n\t";
7350 if (rld[r].secondary_in_reload != -1)
7352 fprintf (f, "%ssecondary_in_reload = %d",
7353 prefix, rld[r].secondary_in_reload);
7354 prefix = ", ";
7357 if (rld[r].secondary_out_reload != -1)
7358 fprintf (f, "%ssecondary_out_reload = %d\n",
7359 prefix, rld[r].secondary_out_reload);
7361 prefix = "\n\t";
7362 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7364 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7365 insn_data[rld[r].secondary_in_icode].name);
7366 prefix = ", ";
7369 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7370 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7371 insn_data[rld[r].secondary_out_icode].name);
7373 fprintf (f, "\n");
7377 DEBUG_FUNCTION void
7378 debug_reload (void)
7380 debug_reload_to_stream (stderr);