1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
56 NOTE SIDE EFFECTS:
58 find_reloads can alter the operands of the instruction it is called on.
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
72 Using a reload register for several reloads in one insn:
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
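/* Illustrative sketch (added for exposition, not part of this file): the
   caller protocol described above, as driven from reload1.c.  The
   prototypes are the ones declared in reload.h; the argument values below
   are examples only.  */
#if 0
static void
reload_insn_sketch (rtx_insn *insn)
{
  int i;

  /* 1.  Record the reloads INSN needs.  The nonzero second argument asks
     find_reloads to also record the locations to patch later; the third
     gives the number of levels of indirect addressing that are valid.  */
  find_reloads (insn, 1, 0, 0, NULL);

  /* 2.  Choose a hard register for every reload that find_reloads did not
     already fix, and emit load insns before INSN (and store insns after
     it) to copy the values in and out.  */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      ; /* ... allocate a spill register and set rld[i].reg_rtx ...  */

  /* 3.  Substitute the chosen reload registers into the locations
     recorded in step 1.  */
  subst_reloads (insn);
}
#endif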
86 #define REG_OK_STRICT
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "function.h"
108 #include "params.h"
109 #include "target.h"
110 #include "ira.h"
112 /* True if X is a constant that can be forced into the constant pool.
113 MODE is the mode of the operand, or VOIDmode if not known. */
114 #define CONST_POOL_OK_P(MODE, X) \
115 ((MODE) != VOIDmode \
116 && CONSTANT_P (X) \
117 && GET_CODE (X) != HIGH \
118 && !targetm.cannot_force_const_mem (MODE, X))
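/* For example (illustrative): a CONST_DOUBLE operand in DFmode normally
   satisfies CONST_POOL_OK_P (DFmode, x), whereas a HIGH expression, an
   operand whose mode is unknown (VOIDmode), or anything rejected by the
   target's cannot_force_const_mem hook does not.  */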
120 /* True if C is a non-empty register class that has too few registers
121 to be safely used as a reload target class. */
123 static inline bool
124 small_register_class_p (reg_class_t rclass)
126 return (reg_class_size [(int) rclass] == 1
127 || (reg_class_size [(int) rclass] >= 1
128 && targetm.class_likely_spilled_p (rclass)));
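/* E.g. a class containing a single hard register always counts as small
   here, as does any non-empty class that the target's
   class_likely_spilled_p hook flags (ports often flag the one- or
   two-register classes used for shift counts and the like).  */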
132 /* All reloads of the current insn are recorded here. See reload.h for
133 comments. */
134 int n_reloads;
135 struct reload rld[MAX_RELOADS];
137 /* All the "earlyclobber" operands of the current insn
138 are recorded here. */
139 int n_earlyclobbers;
140 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
142 int reload_n_operands;
144 /* Replacing reloads.
146 If `replace_reloads' is nonzero, then as each reload is recorded
147 an entry is made for it in the table `replacements'.
148 Then later `subst_reloads' can look through that table and
149 perform all the replacements needed. */
151 /* Nonzero means record the places to replace. */
152 static int replace_reloads;
154 /* Each replacement is recorded with a structure like this. */
155 struct replacement
157 rtx *where; /* Location to store in */
158 int what; /* which reload this is for */
159 enum machine_mode mode; /* mode it must have */
162 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
164 /* Number of replacements currently recorded. */
165 static int n_replacements;
167 /* Used to track what is modified by an operand. */
168 struct decomposition
170 int reg_flag; /* Nonzero if referencing a register. */
171 int safe; /* Nonzero if this can't conflict with anything. */
172 rtx base; /* Base address for MEM. */
173 HOST_WIDE_INT start; /* Starting offset or register number. */
174 HOST_WIDE_INT end; /* Ending offset or register number. */
177 #ifdef SECONDARY_MEMORY_NEEDED
179 /* Save MEMs needed to copy from one class of registers to another. One MEM
180 is used per mode, but normally only one or two modes are ever used.
182 We keep two versions, before and after register elimination. The one
183 after register elimination is recorded separately for each operand. This
184 is done in case the address is not valid, to be sure that we reload
185 each one separately. */
187 static rtx secondary_memlocs[NUM_MACHINE_MODES];
188 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
189 static int secondary_memlocs_elim_used = 0;
190 #endif
192 /* The instruction we are doing reloads for;
193 so we can test whether a register dies in it. */
194 static rtx_insn *this_insn;
196 /* Nonzero if this instruction is a user-specified asm with operands. */
197 static int this_insn_is_asm;
199 /* If hard_regs_live_known is nonzero,
200 we can tell which hard regs are currently live,
201 at least enough to succeed in choosing dummy reloads. */
202 static int hard_regs_live_known;
204 /* Indexed by hard reg number,
205 element is nonnegative if hard reg has been spilled.
206 This vector is passed to `find_reloads' as an argument
207 and is not changed here. */
208 static short *static_reload_reg_p;
210 /* Set to 1 in subst_reg_equivs if it changes anything. */
211 static int subst_reg_equivs_changed;
213 /* On return from push_reload, holds the reload-number for the OUT
214 operand, which can be different from that for the input operand. */
215 static int output_reloadnum;
217 /* Compare two RTX's. */
218 #define MATCHES(x, y) \
219 (x == y || (x != 0 && (REG_P (x) \
220 ? REG_P (y) && REGNO (x) == REGNO (y) \
221 : rtx_equal_p (x, y) && ! side_effects_p (x))))
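/* Note (illustrative): registers are compared by number only, so
   (reg:SI 3) MATCHES (reg:DI 3); any other rtx must be structurally equal
   via rtx_equal_p and free of side effects, so two identical
   (mem (post_inc ...)) references do not match.  */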
223 /* Indicates if two reload purposes are for similar enough things that we
224 can merge their reloads. */
225 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
226 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
227 || ((when1) == (when2) && (op1) == (op2)) \
228 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
229 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
230 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
231 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
232 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
234 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
235 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
236 ((when1) != (when2) \
237 || ! ((op1) == (op2) \
238 || (when1) == RELOAD_FOR_INPUT \
239 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
240 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
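/* Worked example (illustrative): two RELOAD_FOR_OPERAND_ADDRESS reloads are
   MERGABLE_RELOADS even for different operand numbers, and MERGE_TO_OTHER
   is zero for them, so the merged reload keeps its type.  Two
   RELOAD_FOR_INPUT_ADDRESS reloads merge only when they are for the same
   operand.  Anything merges with a RELOAD_OTHER reload, and whenever the
   two types differ MERGE_TO_OTHER demotes the result to RELOAD_OTHER.  */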
242 /* If we are going to reload an address, compute the reload type to
243 use. */
244 #define ADDR_TYPE(type) \
245 ((type) == RELOAD_FOR_INPUT_ADDRESS \
246 ? RELOAD_FOR_INPADDR_ADDRESS \
247 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
248 ? RELOAD_FOR_OUTADDR_ADDRESS \
249 : (type)))
251 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
252 enum machine_mode, enum reload_type,
253 enum insn_code *, secondary_reload_info *);
254 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
255 int, unsigned int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, reg_class_t, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
268 rtx_insn *, int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
271 addr_space_t, rtx *);
272 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
273 int, enum reload_type, int, rtx_insn *);
274 static rtx subst_reg_equivs (rtx, rtx_insn *);
275 static rtx subst_indexed_address (rtx);
276 static void update_auto_inc_notes (rtx_insn *, int, int);
277 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
278 enum rtx_code, enum rtx_code, rtx *,
279 int, enum reload_type,int, rtx_insn *);
280 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
281 enum machine_mode, int,
282 enum reload_type, int);
283 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
284 int, rtx_insn *, int *);
285 static void copy_replacements_1 (rtx *, rtx *, int);
286 static int find_inc_amount (rtx, rtx);
287 static int refers_to_mem_for_reload_p (rtx);
288 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
289 rtx, rtx *);
291 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
292 list yet. */
294 static void
295 push_reg_equiv_alt_mem (int regno, rtx mem)
297 rtx it;
299 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
300 if (rtx_equal_p (XEXP (it, 0), mem))
301 return;
303 reg_equiv_alt_mem_list (regno)
304 = alloc_EXPR_LIST (REG_EQUIV, mem,
305 reg_equiv_alt_mem_list (regno));
308 /* Determine if any secondary reloads are needed for loading (if IN_P is
309 nonzero) or storing (if IN_P is zero) X to or from a reload register of
310 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
311 are needed, push them.
313 Return the reload number of the secondary reload we made, or -1 if
314 we didn't need one. *PICODE is set to the insn_code to use if we do
315 need a secondary reload. */
317 static int
318 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
319 enum reg_class reload_class,
320 enum machine_mode reload_mode, enum reload_type type,
321 enum insn_code *picode, secondary_reload_info *prev_sri)
323 enum reg_class rclass = NO_REGS;
324 enum reg_class scratch_class;
325 enum machine_mode mode = reload_mode;
326 enum insn_code icode = CODE_FOR_nothing;
327 enum insn_code t_icode = CODE_FOR_nothing;
328 enum reload_type secondary_type;
329 int s_reload, t_reload = -1;
330 const char *scratch_constraint;
331 secondary_reload_info sri;
333 if (type == RELOAD_FOR_INPUT_ADDRESS
334 || type == RELOAD_FOR_OUTPUT_ADDRESS
335 || type == RELOAD_FOR_INPADDR_ADDRESS
336 || type == RELOAD_FOR_OUTADDR_ADDRESS)
337 secondary_type = type;
338 else
339 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341 *picode = CODE_FOR_nothing;
343 /* If X is a paradoxical SUBREG, use the inner value to determine both the
344 mode and object being reloaded. */
345 if (paradoxical_subreg_p (x))
347 x = SUBREG_REG (x);
348 reload_mode = GET_MODE (x);
351 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
352 is still a pseudo-register by now, it *must* have an equivalent MEM
353 but we don't want to assume that), use that equivalent when seeing if
354 a secondary reload is needed since whether or not a reload is needed
355 might be sensitive to the form of the MEM. */
357 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
358 && reg_equiv_mem (REGNO (x)))
359 x = reg_equiv_mem (REGNO (x));
361 sri.icode = CODE_FOR_nothing;
362 sri.prev_sri = prev_sri;
363 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
364 reload_mode, &sri);
365 icode = (enum insn_code) sri.icode;
367 /* If we don't need any secondary registers, done. */
368 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
369 return -1;
371 if (rclass != NO_REGS)
372 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
373 reload_mode, type, &t_icode, &sri);
375 /* If we will be using an insn, the secondary reload is for a
376 scratch register. */
378 if (icode != CODE_FOR_nothing)
380 /* If IN_P is nonzero, the reload register will be the output in
381 operand 0. If IN_P is zero, the reload register will be the input
382 in operand 1. Outputs should have an initial "=", which we must
383 skip. */
385 /* ??? It would be useful to be able to handle only two, or more than
386 three, operands, but for now we can only handle the case of having
387 exactly three: output, input and one temp/scratch. */
388 gcc_assert (insn_data[(int) icode].n_operands == 3);
390 /* ??? We currently have no way to represent a reload that needs
391 an icode to reload from an intermediate tertiary reload register.
392 We should probably have a new field in struct reload to tag a
393 chain of scratch operand reloads onto. */
394 gcc_assert (rclass == NO_REGS);
396 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
397 gcc_assert (*scratch_constraint == '=');
398 scratch_constraint++;
399 if (*scratch_constraint == '&')
400 scratch_constraint++;
401 scratch_class = (reg_class_for_constraint
402 (lookup_constraint (scratch_constraint)));
404 rclass = scratch_class;
405 mode = insn_data[(int) icode].operand[2].mode;
408 /* This case isn't valid, so fail. Reload is allowed to use the same
409 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
410 in the case of a secondary register, we actually need two different
411 registers for correct code. We fail here to prevent the possibility of
412 silently generating incorrect code later.
414 The convention is that secondary input reloads are valid only if the
415 secondary_class is different from class. If you have such a case, you
416 cannot use secondary reloads; you must work around the problem some
417 other way.
419 Allow this when a reload_in/out pattern is being used. I.e. assume
420 that the generated code handles this case. */
422 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
423 || t_icode != CODE_FOR_nothing);
425 /* See if we can reuse an existing secondary reload. */
426 for (s_reload = 0; s_reload < n_reloads; s_reload++)
427 if (rld[s_reload].secondary_p
428 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
429 || reg_class_subset_p (rld[s_reload].rclass, rclass))
430 && ((in_p && rld[s_reload].inmode == mode)
431 || (! in_p && rld[s_reload].outmode == mode))
432 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
433 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
434 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
435 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
436 && (small_register_class_p (rclass)
437 || targetm.small_register_classes_for_mode_p (VOIDmode))
438 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
439 opnum, rld[s_reload].opnum))
441 if (in_p)
442 rld[s_reload].inmode = mode;
443 if (! in_p)
444 rld[s_reload].outmode = mode;
446 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
447 rld[s_reload].rclass = rclass;
449 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
450 rld[s_reload].optional &= optional;
451 rld[s_reload].secondary_p = 1;
452 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
453 opnum, rld[s_reload].opnum))
454 rld[s_reload].when_needed = RELOAD_OTHER;
456 break;
459 if (s_reload == n_reloads)
461 #ifdef SECONDARY_MEMORY_NEEDED
462 /* If we need a memory location to copy between the two reload regs,
463 set it up now. Note that we do the input case before making
464 the reload and the output case after. This is due to the
465 way reloads are output. */
467 if (in_p && icode == CODE_FOR_nothing
468 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
470 get_secondary_mem (x, reload_mode, opnum, type);
472 /* We may have just added new reloads. Make sure we add
473 the new reload at the end. */
474 s_reload = n_reloads;
476 #endif
478 /* We need to make a new secondary reload for this register class. */
479 rld[s_reload].in = rld[s_reload].out = 0;
480 rld[s_reload].rclass = rclass;
482 rld[s_reload].inmode = in_p ? mode : VOIDmode;
483 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
484 rld[s_reload].reg_rtx = 0;
485 rld[s_reload].optional = optional;
486 rld[s_reload].inc = 0;
487 /* Maybe we could combine these, but it seems too tricky. */
488 rld[s_reload].nocombine = 1;
489 rld[s_reload].in_reg = 0;
490 rld[s_reload].out_reg = 0;
491 rld[s_reload].opnum = opnum;
492 rld[s_reload].when_needed = secondary_type;
493 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
494 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
495 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
496 rld[s_reload].secondary_out_icode
497 = ! in_p ? t_icode : CODE_FOR_nothing;
498 rld[s_reload].secondary_p = 1;
500 n_reloads++;
502 #ifdef SECONDARY_MEMORY_NEEDED
503 if (! in_p && icode == CODE_FOR_nothing
504 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
505 get_secondary_mem (x, mode, opnum, type);
506 #endif
509 *picode = icode;
510 return s_reload;
513 /* If a secondary reload is needed, return its class. If both an intermediate
514 register and a scratch register are needed, we return the class of the
515 intermediate register. */
516 reg_class_t
517 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
518 rtx x)
520 enum insn_code icode;
521 secondary_reload_info sri;
523 sri.icode = CODE_FOR_nothing;
524 sri.prev_sri = NULL;
525 rclass
526 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
527 icode = (enum insn_code) sri.icode;
529 /* If there are no secondary reloads at all, we return NO_REGS.
530 If an intermediate register is needed, we return its class. */
531 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
532 return rclass;
534 /* No intermediate register is needed, but we have a special reload
535 pattern, which we assume for now needs a scratch register. */
536 return scratch_reload_class (icode);
539 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
540 three operands, verify that operand 2 is an output operand, and return
541 its register class.
542 ??? We'd like to be able to handle any pattern with at least 2 operands,
543 for zero or more scratch registers, but that needs more infrastructure. */
544 enum reg_class
545 scratch_reload_class (enum insn_code icode)
547 const char *scratch_constraint;
548 enum reg_class rclass;
550 gcc_assert (insn_data[(int) icode].n_operands == 3);
551 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
552 gcc_assert (*scratch_constraint == '=');
553 scratch_constraint++;
554 if (*scratch_constraint == '&')
555 scratch_constraint++;
556 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
557 gcc_assert (rclass != NO_REGS);
558 return rclass;
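/* For instance (illustrative), if the reload pattern's scratch operand has
   the constraint "=&r", the '=' and '&' are skipped above and the class
   returned is the one behind "r" (GENERAL_REGS on typical targets).  */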
561 #ifdef SECONDARY_MEMORY_NEEDED
563 /* Return a memory location that will be used to copy X in mode MODE.
564 If we haven't already made a location for this mode in this insn,
565 call find_reloads_address on the location being returned. */
567 rtx
568 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
569 int opnum, enum reload_type type)
571 rtx loc;
572 int mem_valid;
574 /* By default, if MODE is narrower than a word, widen it to a word.
575 This is required because most machines that require these memory
576 locations do not support short loads and stores from all registers
577 (e.g., FP registers). */
579 #ifdef SECONDARY_MEMORY_NEEDED_MODE
580 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
581 #else
582 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
583 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
584 #endif
586 /* If we already have made a MEM for this operand in MODE, return it. */
587 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
588 return secondary_memlocs_elim[(int) mode][opnum];
590 /* If this is the first time we've tried to get a MEM for this mode,
591 allocate a new one. `something_changed' in reload will get set
592 by noticing that the frame size has changed. */
594 if (secondary_memlocs[(int) mode] == 0)
596 #ifdef SECONDARY_MEMORY_NEEDED_RTX
597 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
598 #else
599 secondary_memlocs[(int) mode]
600 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
601 #endif
604 /* Get a version of the address doing any eliminations needed. If that
605 didn't give us a new MEM, make a new one if it isn't valid. */
607 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
608 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
609 MEM_ADDR_SPACE (loc));
611 if (! mem_valid && loc == secondary_memlocs[(int) mode])
612 loc = copy_rtx (loc);
614 /* The only time the call below will do anything is if the stack
615 offset is too large. In that case IND_LEVELS doesn't matter, so we
616 can just pass a zero. Adjust the type to be the address of the
617 corresponding object. If the address was valid, save the eliminated
618 address. If it wasn't valid, we need to make a reload each time, so
619 don't save it. */
621 if (! mem_valid)
623 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
624 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
625 : RELOAD_OTHER);
627 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
628 opnum, type, 0, 0);
631 secondary_memlocs_elim[(int) mode][opnum] = loc;
632 if (secondary_memlocs_elim_used <= (int)mode)
633 secondary_memlocs_elim_used = (int)mode + 1;
634 return loc;
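/* Example (illustrative): on a target that needs memory to copy between
   general and floating-point registers, the first request for a given mode
   allocates one stack slot for that mode (narrow integer modes are widened
   to word size first, unless SECONDARY_MEMORY_NEEDED_MODE says otherwise),
   and the eliminated address is cached per mode and operand number, so a
   later request for the same mode and operand returns the same MEM.  */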
637 /* Clear any secondary memory locations we've made. */
639 void
640 clear_secondary_mem (void)
642 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
644 #endif /* SECONDARY_MEMORY_NEEDED */
647 /* Find the largest class which has at least one register valid in
648 mode INNER, and which for every such register, that register number
649 plus N is also valid in OUTER (if in range) and is cheap to move
650 into REGNO. Such a class must exist. */
652 static enum reg_class
653 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
654 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
655 unsigned int dest_regno ATTRIBUTE_UNUSED)
657 int best_cost = -1;
658 int rclass;
659 int regno;
660 enum reg_class best_class = NO_REGS;
661 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
662 unsigned int best_size = 0;
663 int cost;
665 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
667 int bad = 0;
668 int good = 0;
669 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
670 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
672 if (HARD_REGNO_MODE_OK (regno, inner))
674 good = 1;
675 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
676 && ! HARD_REGNO_MODE_OK (regno + n, outer))
677 bad = 1;
681 if (bad || !good)
682 continue;
683 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
685 if ((reg_class_size[rclass] > best_size
686 && (best_cost < 0 || best_cost >= cost))
687 || best_cost > cost)
689 best_class = (enum reg_class) rclass;
690 best_size = reg_class_size[rclass];
691 best_cost = register_move_cost (outer, (enum reg_class) rclass,
692 dest_class);
696 gcc_assert (best_size != 0);
698 return best_class;
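/* This is used from push_reload below when a SUBREG of a hard register must
   itself be reloaded: OUTER is the mode in which the value is reloaded,
   INNER the inner register's mode, N the hard-register offset computed by
   subreg_regno_offset, and DEST_REGNO the inner register's number (see the
   calls guarded by reload_inner_reg_of_subreg).  */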
701 /* We are trying to reload a subreg of something that is not a register.
702 Find the largest class which contains only registers valid in
703 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
704 which we would eventually like to obtain the object. */
706 static enum reg_class
707 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
708 enum machine_mode mode ATTRIBUTE_UNUSED,
709 enum reg_class dest_class ATTRIBUTE_UNUSED)
711 int best_cost = -1;
712 int rclass;
713 int regno;
714 enum reg_class best_class = NO_REGS;
715 unsigned int best_size = 0;
716 int cost;
718 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
720 int bad = 0;
721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
723 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
724 && !HARD_REGNO_MODE_OK (regno, mode))
725 bad = 1;
728 if (bad)
729 continue;
731 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
733 if ((reg_class_size[rclass] > best_size
734 && (best_cost < 0 || best_cost >= cost))
735 || best_cost > cost)
737 best_class = (enum reg_class) rclass;
738 best_size = reg_class_size[rclass];
739 best_cost = register_move_cost (outer, (enum reg_class) rclass,
740 dest_class);
744 gcc_assert (best_size != 0);
746 #ifdef LIMIT_RELOAD_CLASS
747 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
748 #endif
749 return best_class;
752 /* Return the number of a previously made reload that can be combined with
753 a new one, or n_reloads if none of the existing reloads can be used.
754 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
755 push_reload; they determine the kind of the new reload that we try to
756 combine. P_IN points to the corresponding value of IN, which can be
757 modified by this function.
758 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
760 static int
761 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
762 enum reload_type type, int opnum, int dont_share)
764 rtx in = *p_in;
765 int i;
766 /* We can't merge two reloads if the output of either one is
767 earlyclobbered. */
769 if (earlyclobber_operand_p (out))
770 return n_reloads;
772 /* We can use an existing reload if the class is right
773 and at least one of IN and OUT is a match
774 and the other is at worst neutral.
775 (A zero compared against anything is neutral.)
777 For targets with small register classes, don't use existing reloads
778 unless they are for the same thing since that can cause us to need
779 more reload registers than we otherwise would. */
781 for (i = 0; i < n_reloads; i++)
782 if ((reg_class_subset_p (rclass, rld[i].rclass)
783 || reg_class_subset_p (rld[i].rclass, rclass))
784 /* If the existing reload has a register, it must fit our class. */
785 && (rld[i].reg_rtx == 0
786 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
787 true_regnum (rld[i].reg_rtx)))
788 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
789 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
790 || (out != 0 && MATCHES (rld[i].out, out)
791 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
792 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
793 && (small_register_class_p (rclass)
794 || targetm.small_register_classes_for_mode_p (VOIDmode))
795 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
796 return i;
798 /* Reloading a plain reg for input can match a reload to postincrement
799 that reg, since the postincrement's value is the right value.
800 Likewise, it can match a preincrement reload, since we regard
801 the preincrementation as happening before any ref in this insn
802 to that register. */
803 for (i = 0; i < n_reloads; i++)
804 if ((reg_class_subset_p (rclass, rld[i].rclass)
805 || reg_class_subset_p (rld[i].rclass, rclass))
806 /* If the existing reload has a register, it must fit our
807 class. */
808 && (rld[i].reg_rtx == 0
809 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
810 true_regnum (rld[i].reg_rtx)))
811 && out == 0 && rld[i].out == 0 && rld[i].in != 0
812 && ((REG_P (in)
813 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
814 && MATCHES (XEXP (rld[i].in, 0), in))
815 || (REG_P (rld[i].in)
816 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
817 && MATCHES (XEXP (in, 0), rld[i].in)))
818 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
819 && (small_register_class_p (rclass)
820 || targetm.small_register_classes_for_mode_p (VOIDmode))
821 && MERGABLE_RELOADS (type, rld[i].when_needed,
822 opnum, rld[i].opnum))
824 /* Make sure reload_in ultimately has the increment,
825 not the plain register. */
826 if (REG_P (in))
827 *p_in = rld[i].in;
828 return i;
830 return n_reloads;
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834 expression. MODE is the mode that X will be used in. OUTPUT is true if
835 the function is invoked for the output part of an enclosing reload. */
837 static bool
838 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
840 rtx inner;
842 /* Only SUBREGs are problematical. */
843 if (GET_CODE (x) != SUBREG)
844 return false;
846 inner = SUBREG_REG (x);
848 /* If INNER is a constant or PLUS, then INNER will need reloading. */
849 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850 return true;
852 /* If INNER is not a hard register, then INNER will not need reloading. */
853 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854 return false;
856 /* If INNER is not ok for MODE, then INNER will need reloading. */
857 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
858 return true;
860 /* If this is for an output, and the outer part is a word or smaller,
861 INNER is larger than a word and the number of registers in INNER is
862 not the same as the number of words in INNER, then INNER will need
863 reloading (with an in-out reload). */
864 return (output
865 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
866 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
867 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
868 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
871 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
872 requiring an extra reload register. The caller has already found that
873 IN contains some reference to REGNO, so check that we can produce the
874 new value in a single step. E.g. if we have
875 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
876 instruction that adds one to a register, this should succeed.
877 However, if we have something like
878 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
879 needs to be loaded into a register first, we need a separate reload
880 register.
881 Such PLUS reloads are generated by find_reloads_address_part.
882 The out-of-range PLUS expressions are usually introduced in the instruction
883 patterns by register elimination and by substituting pseudos without a
884 home with their function-invariant equivalences. */
885 static int
886 can_reload_into (rtx in, int regno, enum machine_mode mode)
888 rtx dst;
889 rtx_insn *test_insn;
890 int r = 0;
891 struct recog_data_d save_recog_data;
893 /* For matching constraints, we often get notional input reloads where
894 we want to use the original register as the reload register. I.e.
895 technically this is a non-optional input-output reload, but IN is
896 already a valid register, and has been chosen as the reload register.
897 Speed this up, since it trivially works. */
898 if (REG_P (in))
899 return 1;
901 /* To test MEMs properly, we'd have to take into account all the reloads
902 that are already scheduled, which can become quite complicated.
903 And since we've already handled address reloads for this MEM, it
904 should always succeed anyway. */
905 if (MEM_P (in))
906 return 1;
908 /* If we can make a simple SET insn that does the job, everything should
909 be fine. */
910 dst = gen_rtx_REG (mode, regno);
911 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
912 save_recog_data = recog_data;
913 if (recog_memoized (test_insn) >= 0)
915 extract_insn (test_insn);
916 r = constrain_operands (1);
918 recog_data = save_recog_data;
919 return r;
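/* Example (illustrative): with IN = (plus:SI (reg:SI 13) (const_int 1)),
   can_reload_into (in, 13, SImode) builds the test insn
   (set (reg:SI 13) (plus:SI (reg:SI 13) (const_int 1))) and returns nonzero
   when the target recognizes and can constrain it; with (const_int 999)
   instead, a target whose add patterns take only small immediates fails the
   test, so the caller must use a separate reload register.  */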
922 /* Record one reload that needs to be performed.
923 IN is an rtx saying where the data are to be found before this instruction.
924 OUT says where they must be stored after the instruction.
925 (IN is zero for data not read, and OUT is zero for data not written.)
926 INLOC and OUTLOC point to the places in the instructions where
927 IN and OUT were found.
928 If IN and OUT are both nonzero, it means the same register must be used
929 to reload both IN and OUT.
931 RCLASS is a register class required for the reloaded data.
932 INMODE is the machine mode that the instruction requires
933 for the reg that replaces IN and OUTMODE is likewise for OUT.
935 If IN is zero, then OUT's location and mode should be passed as
936 INLOC and INMODE.
938 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
940 OPTIONAL nonzero means this reload does not need to be performed:
941 it can be discarded if that is more convenient.
943 OPNUM and TYPE say what the purpose of this reload is.
945 The return value is the reload-number for this reload.
947 If both IN and OUT are nonzero, in some rare cases we might
948 want to make two separate reloads. (Actually we never do this now.)
949 Therefore, the reload-number for OUT is stored in
950 output_reloadnum when we return; the return value applies to IN.
951 Usually (presently always), when IN and OUT are nonzero,
952 the two reload-numbers are equal, but the caller should be careful to
953 distinguish them. */
955 int
956 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
957 enum reg_class rclass, enum machine_mode inmode,
958 enum machine_mode outmode, int strict_low, int optional,
959 int opnum, enum reload_type type)
961 int i;
962 int dont_share = 0;
963 int dont_remove_subreg = 0;
964 #ifdef LIMIT_RELOAD_CLASS
965 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
966 #endif
967 int secondary_in_reload = -1, secondary_out_reload = -1;
968 enum insn_code secondary_in_icode = CODE_FOR_nothing;
969 enum insn_code secondary_out_icode = CODE_FOR_nothing;
970 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
971 subreg_in_class = NO_REGS;
973 /* INMODE and/or OUTMODE could be VOIDmode if no mode
974 has been specified for the operand. In that case,
975 use the operand's mode as the mode to reload. */
976 if (inmode == VOIDmode && in != 0)
977 inmode = GET_MODE (in);
978 if (outmode == VOIDmode && out != 0)
979 outmode = GET_MODE (out);
981 /* If find_reloads and friends have until now failed to replace a pseudo
982 with a constant from reg_equiv_constant, something went wrong
983 beforehand.
984 Note that it can't simply be done here if we missed it earlier
985 since the constant might need to be pushed into the literal pool
986 and the resulting memref would probably need further
987 reloading. */
988 if (in != 0 && REG_P (in))
990 int regno = REGNO (in);
992 gcc_assert (regno < FIRST_PSEUDO_REGISTER
993 || reg_renumber[regno] >= 0
994 || reg_equiv_constant (regno) == NULL_RTX);
997 /* reg_equiv_constant only contains constants which are obviously
998 not appropriate as destination. So if we would need to replace
999 the destination pseudo with a constant we are in real
1000 trouble. */
1001 if (out != 0 && REG_P (out))
1003 int regno = REGNO (out);
1005 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1006 || reg_renumber[regno] >= 0
1007 || reg_equiv_constant (regno) == NULL_RTX);
1010 /* If we have a read-write operand with an address side-effect,
1011 change either IN or OUT so the side-effect happens only once. */
1012 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1013 switch (GET_CODE (XEXP (in, 0)))
1015 case POST_INC: case POST_DEC: case POST_MODIFY:
1016 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1017 break;
1019 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1020 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1021 break;
1023 default:
1024 break;
1027 /* If we are reloading a (SUBREG constant ...), really reload just the
1028 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1029 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1030 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1031 register is a pseudo, also reload the inside expression.
1032 For machines that extend byte loads, do this for any SUBREG of a pseudo
1033 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1034 M2 is an integral mode that gets extended when loaded.
1035 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1036 where either M1 is not valid for R or M2 is wider than a word but we
1037 only need one register to store an M2-sized quantity in R.
1038 (However, if OUT is nonzero, we need to reload the reg *and*
1039 the subreg, so do nothing here, and let following statement handle it.)
1041 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1042 we can't handle it here because CONST_INT does not indicate a mode.
1044 Similarly, we must reload the inside expression if we have a
1045 STRICT_LOW_PART (presumably, in == out in this case).
1047 Also reload the inner expression if it does not require a secondary
1048 reload but the SUBREG does.
1050 Finally, reload the inner expression if it is a register that is in
1051 the class whose registers cannot be referenced in a different size
1052 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1053 cannot reload just the inside since we might end up with the wrong
1054 register class. But if it is inside a STRICT_LOW_PART, we have
1055 no choice, so we hope we do get the right register class there. */
1057 if (in != 0 && GET_CODE (in) == SUBREG
1058 && (subreg_lowpart_p (in) || strict_low)
1059 #ifdef CANNOT_CHANGE_MODE_CLASS
1060 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1061 #endif
1062 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1063 && (CONSTANT_P (SUBREG_REG (in))
1064 || GET_CODE (SUBREG_REG (in)) == PLUS
1065 || strict_low
1066 || (((REG_P (SUBREG_REG (in))
1067 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1068 || MEM_P (SUBREG_REG (in)))
1069 && ((GET_MODE_PRECISION (inmode)
1070 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1071 #ifdef LOAD_EXTEND_OP
1072 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1073 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1074 <= UNITS_PER_WORD)
1075 && (GET_MODE_PRECISION (inmode)
1076 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1077 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1078 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1079 #endif
1080 #ifdef WORD_REGISTER_OPERATIONS
1081 || ((GET_MODE_PRECISION (inmode)
1082 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1083 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1084 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1085 / UNITS_PER_WORD)))
1086 #endif
1088 || (REG_P (SUBREG_REG (in))
1089 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1090 /* The case where out is nonzero
1091 is handled differently in the following statement. */
1092 && (out == 0 || subreg_lowpart_p (in))
1093 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1094 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1095 > UNITS_PER_WORD)
1096 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1097 / UNITS_PER_WORD)
1098 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1099 [GET_MODE (SUBREG_REG (in))]))
1100 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1101 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1102 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1103 SUBREG_REG (in))
1104 == NO_REGS))
1105 #ifdef CANNOT_CHANGE_MODE_CLASS
1106 || (REG_P (SUBREG_REG (in))
1107 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1108 && REG_CANNOT_CHANGE_MODE_P
1109 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1110 #endif
1113 #ifdef LIMIT_RELOAD_CLASS
1114 in_subreg_loc = inloc;
1115 #endif
1116 inloc = &SUBREG_REG (in);
1117 in = *inloc;
1118 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1119 if (MEM_P (in))
1120 /* This is supposed to happen only for paradoxical subregs made by
1121 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1122 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1123 #endif
1124 inmode = GET_MODE (in);
1127 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1128 where M1 is not valid for R if it was not handled by the code above.
1130 Similar issue for (SUBREG constant ...) if it was not handled by the
1131 code above. This can happen if SUBREG_BYTE != 0.
1133 However, we must reload the inner reg *as well as* the subreg in
1134 that case. */
1136 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1138 if (REG_P (SUBREG_REG (in)))
1139 subreg_in_class
1140 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1141 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1142 GET_MODE (SUBREG_REG (in)),
1143 SUBREG_BYTE (in),
1144 GET_MODE (in)),
1145 REGNO (SUBREG_REG (in)));
1146 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1147 subreg_in_class = find_valid_class_1 (inmode,
1148 GET_MODE (SUBREG_REG (in)),
1149 rclass);
1151 /* This relies on the fact that emit_reload_insns outputs the
1152 instructions for input reloads of type RELOAD_OTHER in the same
1153 order as the reloads. Thus if the outer reload is also of type
1154 RELOAD_OTHER, we are guaranteed that this inner reload will be
1155 output before the outer reload. */
1156 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1157 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1158 dont_remove_subreg = 1;
1161 /* Similarly for paradoxical and problematical SUBREGs on the output.
1162 Note that there is no reason we need worry about the previous value
1163 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1164 entitled to clobber it all (except in the case of a word mode subreg
1165 or of a STRICT_LOW_PART, in that latter case the constraint should
1166 label it input-output.) */
1167 if (out != 0 && GET_CODE (out) == SUBREG
1168 && (subreg_lowpart_p (out) || strict_low)
1169 #ifdef CANNOT_CHANGE_MODE_CLASS
1170 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1171 #endif
1172 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1173 && (CONSTANT_P (SUBREG_REG (out))
1174 || strict_low
1175 || (((REG_P (SUBREG_REG (out))
1176 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1177 || MEM_P (SUBREG_REG (out)))
1178 && ((GET_MODE_PRECISION (outmode)
1179 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1180 #ifdef WORD_REGISTER_OPERATIONS
1181 || ((GET_MODE_PRECISION (outmode)
1182 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1183 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1184 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1185 / UNITS_PER_WORD)))
1186 #endif
1188 || (REG_P (SUBREG_REG (out))
1189 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1190 /* The case of a word mode subreg
1191 is handled differently in the following statement. */
1192 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1193 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1194 > UNITS_PER_WORD))
1195 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1196 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1197 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1198 SUBREG_REG (out))
1199 == NO_REGS))
1200 #ifdef CANNOT_CHANGE_MODE_CLASS
1201 || (REG_P (SUBREG_REG (out))
1202 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1203 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1204 GET_MODE (SUBREG_REG (out)),
1205 outmode))
1206 #endif
1209 #ifdef LIMIT_RELOAD_CLASS
1210 out_subreg_loc = outloc;
1211 #endif
1212 outloc = &SUBREG_REG (out);
1213 out = *outloc;
1214 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1215 gcc_assert (!MEM_P (out)
1216 || GET_MODE_SIZE (GET_MODE (out))
1217 <= GET_MODE_SIZE (outmode));
1218 #endif
1219 outmode = GET_MODE (out);
1222 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1223 where either M1 is not valid for R or M2 is wider than a word but we
1224 only need one register to store an M2-sized quantity in R.
1226 However, we must reload the inner reg *as well as* the subreg in
1227 that case and the inner reg is an in-out reload. */
1229 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1231 enum reg_class in_out_class
1232 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1233 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1234 GET_MODE (SUBREG_REG (out)),
1235 SUBREG_BYTE (out),
1236 GET_MODE (out)),
1237 REGNO (SUBREG_REG (out)));
1239 /* This relies on the fact that emit_reload_insns outputs the
1240 instructions for output reloads of type RELOAD_OTHER in reverse
1241 order of the reloads. Thus if the outer reload is also of type
1242 RELOAD_OTHER, we are guaranteed that this inner reload will be
1243 output after the outer reload. */
1244 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1245 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1246 0, 0, opnum, RELOAD_OTHER);
1247 dont_remove_subreg = 1;
1250 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1251 if (in != 0 && out != 0 && MEM_P (out)
1252 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1253 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1254 dont_share = 1;
1256 /* If IN is a SUBREG of a hard register, make a new REG. This
1257 simplifies some of the cases below. */
1259 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1260 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1261 && ! dont_remove_subreg)
1262 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1264 /* Similarly for OUT. */
1265 if (out != 0 && GET_CODE (out) == SUBREG
1266 && REG_P (SUBREG_REG (out))
1267 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1268 && ! dont_remove_subreg)
1269 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1271 /* Narrow down the class of register wanted if that is
1272 desirable on this machine for efficiency. */
1274 reg_class_t preferred_class = rclass;
1276 if (in != 0)
1277 preferred_class = targetm.preferred_reload_class (in, rclass);
1279 /* Output reloads may need analogous treatment, different in detail. */
1280 if (out != 0)
1281 preferred_class
1282 = targetm.preferred_output_reload_class (out, preferred_class);
1284 /* Discard what the target said if we cannot do it. */
1285 if (preferred_class != NO_REGS
1286 || (optional && type == RELOAD_FOR_OUTPUT))
1287 rclass = (enum reg_class) preferred_class;
1290 /* Make sure we use a class that can handle the actual pseudo
1291 inside any subreg. For example, on the 386, QImode regs
1292 can appear within SImode subregs. Although GENERAL_REGS
1293 can handle SImode, QImode needs a smaller class. */
1294 #ifdef LIMIT_RELOAD_CLASS
1295 if (in_subreg_loc)
1296 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1297 else if (in != 0 && GET_CODE (in) == SUBREG)
1298 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1300 if (out_subreg_loc)
1301 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1302 if (out != 0 && GET_CODE (out) == SUBREG)
1303 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1304 #endif
1306 /* Verify that this class is at least possible for the mode that
1307 is specified. */
1308 if (this_insn_is_asm)
1310 enum machine_mode mode;
1311 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1312 mode = inmode;
1313 else
1314 mode = outmode;
1315 if (mode == VOIDmode)
1317 error_for_asm (this_insn, "cannot reload integer constant "
1318 "operand in %<asm%>");
1319 mode = word_mode;
1320 if (in != 0)
1321 inmode = word_mode;
1322 if (out != 0)
1323 outmode = word_mode;
1325 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1326 if (HARD_REGNO_MODE_OK (i, mode)
1327 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1328 break;
1329 if (i == FIRST_PSEUDO_REGISTER)
1331 error_for_asm (this_insn, "impossible register constraint "
1332 "in %<asm%>");
1333 /* Avoid further trouble with this insn. */
1334 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1335 /* We used to continue here setting class to ALL_REGS, but it triggers
1336 sanity check on i386 for:
1337 void foo (long double d) { asm ("" :: "a" (d)); }
1341 Returning zero here ought to be safe as we take care in
1342 find_reloads to not process the reloads when instruction was
1343 replaced by USE. */
1345 return 0;
1349 /* Optional output reloads are always OK even if we have no register class,
1350 since the function of these reloads is only to have spill_reg_store etc.
1351 set, so that the storing insn can be deleted later. */
1352 gcc_assert (rclass != NO_REGS
1353 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1355 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1357 if (i == n_reloads)
1359 /* See if we need a secondary reload register to move between CLASS
1360 and IN or CLASS and OUT. Get the icode and push any required reloads
1361 needed for each of them if so. */
1363 if (in != 0)
1364 secondary_in_reload
1365 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1366 &secondary_in_icode, NULL);
1367 if (out != 0 && GET_CODE (out) != SCRATCH)
1368 secondary_out_reload
1369 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1370 type, &secondary_out_icode, NULL);
1372 /* We found no existing reload suitable for re-use.
1373 So add an additional reload. */
1375 #ifdef SECONDARY_MEMORY_NEEDED
1376 if (subreg_in_class == NO_REGS
1377 && in != 0
1378 && (REG_P (in)
1379 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1380 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1381 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1382 /* If a memory location is needed for the copy, make one. */
1383 if (subreg_in_class != NO_REGS
1384 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1385 get_secondary_mem (in, inmode, opnum, type);
1386 #endif
1388 i = n_reloads;
1389 rld[i].in = in;
1390 rld[i].out = out;
1391 rld[i].rclass = rclass;
1392 rld[i].inmode = inmode;
1393 rld[i].outmode = outmode;
1394 rld[i].reg_rtx = 0;
1395 rld[i].optional = optional;
1396 rld[i].inc = 0;
1397 rld[i].nocombine = 0;
1398 rld[i].in_reg = inloc ? *inloc : 0;
1399 rld[i].out_reg = outloc ? *outloc : 0;
1400 rld[i].opnum = opnum;
1401 rld[i].when_needed = type;
1402 rld[i].secondary_in_reload = secondary_in_reload;
1403 rld[i].secondary_out_reload = secondary_out_reload;
1404 rld[i].secondary_in_icode = secondary_in_icode;
1405 rld[i].secondary_out_icode = secondary_out_icode;
1406 rld[i].secondary_p = 0;
1408 n_reloads++;
1410 #ifdef SECONDARY_MEMORY_NEEDED
1411 if (out != 0
1412 && (REG_P (out)
1413 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1414 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1415 && SECONDARY_MEMORY_NEEDED (rclass,
1416 REGNO_REG_CLASS (reg_or_subregno (out)),
1417 outmode))
1418 get_secondary_mem (out, outmode, opnum, type);
1419 #endif
1421 else
1423 /* We are reusing an existing reload,
1424 but we may have additional information for it.
1425 For example, we may now have both IN and OUT
1426 while the old one may have just one of them. */
1428 /* The modes can be different. If they are, we want to reload in
1429 the larger mode, so that the value is valid for both modes. */
1430 if (inmode != VOIDmode
1431 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1432 rld[i].inmode = inmode;
1433 if (outmode != VOIDmode
1434 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1435 rld[i].outmode = outmode;
1436 if (in != 0)
1438 rtx in_reg = inloc ? *inloc : 0;
1439 /* If we merge reloads for two distinct rtl expressions that
1440 are identical in content, there might be duplicate address
1441 reloads. Remove the extra set now, so that if we later find
1442 that we can inherit this reload, we can get rid of the
1443 address reloads altogether.
1445 Do not do this if both reloads are optional since the result
1446 would be an optional reload which could potentially leave
1447 unresolved address replacements.
1449 It is not sufficient to call transfer_replacements since
1450 choose_reload_regs will remove the replacements for address
1451 reloads of inherited reloads which results in the same
1452 problem. */
1453 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1454 && ! (rld[i].optional && optional))
1456 /* We must keep the address reload with the lower operand
1457 number alive. */
1458 if (opnum > rld[i].opnum)
1460 remove_address_replacements (in);
1461 in = rld[i].in;
1462 in_reg = rld[i].in_reg;
1464 else
1465 remove_address_replacements (rld[i].in);
1467 /* When emitting reloads we don't necessarily look at the in-
1468 and outmode, but also directly at the operands (in and out).
1469 So we can't simply overwrite them with whatever we have found
1470 for this (to-be-merged) reload; we have to "merge" that too.
1471 Reusing another reload already verified that we deal with the
1472 same operands, just possibly in different modes. So we
1473 overwrite the operands only when the new mode is larger.
1474 See also PR33613. */
1475 if (!rld[i].in
1476 || GET_MODE_SIZE (GET_MODE (in))
1477 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1478 rld[i].in = in;
1479 if (!rld[i].in_reg
1480 || (in_reg
1481 && GET_MODE_SIZE (GET_MODE (in_reg))
1482 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1483 rld[i].in_reg = in_reg;
1485 if (out != 0)
1487 if (!rld[i].out
1488 || (out
1489 && GET_MODE_SIZE (GET_MODE (out))
1490 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1491 rld[i].out = out;
1492 if (outloc
1493 && (!rld[i].out_reg
1494 || GET_MODE_SIZE (GET_MODE (*outloc))
1495 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1496 rld[i].out_reg = *outloc;
1498 if (reg_class_subset_p (rclass, rld[i].rclass))
1499 rld[i].rclass = rclass;
1500 rld[i].optional &= optional;
1501 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1502 opnum, rld[i].opnum))
1503 rld[i].when_needed = RELOAD_OTHER;
1504 rld[i].opnum = MIN (rld[i].opnum, opnum);
1507 /* If the ostensible rtx being reloaded differs from the rtx found
1508 in the location to substitute, this reload is not safe to combine
1509 because we cannot reliably tell whether it appears in the insn. */
1511 if (in != 0 && in != *inloc)
1512 rld[i].nocombine = 1;
1514 #if 0
1515 /* This was replaced by changes in find_reloads_address_1 and the new
1516 function inc_for_reload, which go with a new meaning of reload_inc. */
1518 /* If this is an IN/OUT reload in an insn that sets the CC,
1519 it must be for an autoincrement. It doesn't work to store
1520 the incremented value after the insn because that would clobber the CC.
1521 So we must do the increment of the value reloaded from,
1522 increment it, store it back, then decrement again. */
1523 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1525 out = 0;
1526 rld[i].out = 0;
1527 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1528 /* If we did not find a nonzero amount-to-increment-by,
1529 that contradicts the belief that IN is being incremented
1530 in an address in this insn. */
1531 gcc_assert (rld[i].inc != 0);
1533 #endif
1535 /* If we will replace IN and OUT with the reload-reg,
1536 record where they are located so that substitution need
1537 not do a tree walk. */
1539 if (replace_reloads)
1541 if (inloc != 0)
1543 struct replacement *r = &replacements[n_replacements++];
1544 r->what = i;
1545 r->where = inloc;
1546 r->mode = inmode;
1548 if (outloc != 0 && outloc != inloc)
1550 struct replacement *r = &replacements[n_replacements++];
1551 r->what = i;
1552 r->where = outloc;
1553 r->mode = outmode;
1557 /* If this reload is just being introduced and it has both
1558 an incoming quantity and an outgoing quantity that are
1559 supposed to be made to match, see if either one of the two
1560 can serve as the place to reload into.
1562 If one of them is acceptable, set rld[i].reg_rtx
1563 to that one. */
1565 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1567 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1568 inmode, outmode,
1569 rld[i].rclass, i,
1570 earlyclobber_operand_p (out));
1572 /* If the outgoing register already contains the same value
1573 as the incoming one, we can dispense with loading it.
1574 The easiest way to tell the caller that is to give a phony
1575 value for the incoming operand (same as outgoing one). */
1576 if (rld[i].reg_rtx == out
1577 && (REG_P (in) || CONSTANT_P (in))
1578 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1579 static_reload_reg_p, i, inmode))
1580 rld[i].in = out;
1583 /* If this is an input reload and the operand contains a register that
1584 dies in this insn and is used nowhere else, see if it is the right class
1585 to be used for this reload. Use it if so. (This occurs most commonly
1586 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1587 this if it is also an output reload that mentions the register unless
1588 the output is a SUBREG that clobbers an entire register.
1590 Note that the operand might be one of the spill regs, if it is a
1591 pseudo reg and we are in a block where spilling has not taken place.
1592 But if there is no spilling in this block, that is OK.
1593 An explicitly used hard reg cannot be a spill reg. */
1595 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1597 rtx note;
1598 int regno;
1599 enum machine_mode rel_mode = inmode;
1601 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1602 rel_mode = outmode;
1604 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1605 if (REG_NOTE_KIND (note) == REG_DEAD
1606 && REG_P (XEXP (note, 0))
1607 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1608 && reg_mentioned_p (XEXP (note, 0), in)
1609 /* Check that a former pseudo is valid; see find_dummy_reload. */
1610 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1611 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1612 ORIGINAL_REGNO (XEXP (note, 0)))
1613 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1614 && ! refers_to_regno_for_reload_p (regno,
1615 end_hard_regno (rel_mode,
1616 regno),
1617 PATTERN (this_insn), inloc)
1618 /* If this is also an output reload, IN cannot be used as
1619 the reload register if it is set in this insn unless IN
1620 is also OUT. */
1621 && (out == 0 || in == out
1622 || ! hard_reg_set_here_p (regno,
1623 end_hard_regno (rel_mode, regno),
1624 PATTERN (this_insn)))
1625 /* ??? Why is this code so different from the previous?
1626 Is there any simple coherent way to describe the two together?
1627 What's going on here? */
1628 && (in != out
1629 || (GET_CODE (in) == SUBREG
1630 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1631 / UNITS_PER_WORD)
1632 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1633 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1634 /* Make sure the operand fits in the reg that dies. */
1635 && (GET_MODE_SIZE (rel_mode)
1636 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1637 && HARD_REGNO_MODE_OK (regno, inmode)
1638 && HARD_REGNO_MODE_OK (regno, outmode))
1640 unsigned int offs;
1641 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1642 hard_regno_nregs[regno][outmode]);
1644 for (offs = 0; offs < nregs; offs++)
1645 if (fixed_regs[regno + offs]
1646 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1647 regno + offs))
1648 break;
1650 if (offs == nregs
1651 && (! (refers_to_regno_for_reload_p
1652 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1653 || can_reload_into (in, regno, inmode)))
1655 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1656 break;
1661 if (out)
1662 output_reloadnum = i;
1664 return i;
1667 /* Record an additional place we must replace a value
1668 for which we have already recorded a reload.
1669 RELOADNUM is the value returned by push_reload
1670 when the reload was recorded.
1671 This is used in insn patterns that use match_dup. */
1673 static void
1674 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1676 if (replace_reloads)
1678 struct replacement *r = &replacements[n_replacements++];
1679 r->what = reloadnum;
1680 r->where = loc;
1681 r->mode = mode;
1685 /* Duplicate any replacement we have recorded to apply at
1686 location ORIG_LOC to also be performed at DUP_LOC.
1687 This is used in insn patterns that use match_dup. */
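/* Illustrative note: if a pattern uses, say, (match_dup 0) and a replacement
   was registered at the location of operand 0, the loop below registers the
   same reload number and mode for the duplicate location as well, so that
   subst_reloads later patches both occurrences.  */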
1689 static void
1690 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1692 int i, n = n_replacements;
1694 for (i = 0; i < n; i++)
1696 struct replacement *r = &replacements[i];
1697 if (r->where == orig_loc)
1698 push_replacement (dup_loc, r->what, r->mode);
1702 /* Transfer all replacements that used to be in reload FROM to be in
1703 reload TO. */
1705 void
1706 transfer_replacements (int to, int from)
1708 int i;
1710 for (i = 0; i < n_replacements; i++)
1711 if (replacements[i].what == from)
1712 replacements[i].what = to;
1715 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1716 or a subpart of it. If we have any replacements registered for IN_RTX,
1717 cancel the reloads that were supposed to load them.
1718 Return nonzero if we canceled any reloads. */
1720 remove_address_replacements (rtx in_rtx)
1722 int i, j;
1723 char reload_flags[MAX_RELOADS];
1724 int something_changed = 0;
1726 memset (reload_flags, 0, sizeof reload_flags);
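/* Each entry of reload_flags summarizes one reload: bit 0 is set when a
   replacement for that reload lies inside IN_RTX (such replacements are
   dropped), bit 1 when it lies elsewhere (those are kept).  Only reloads
   whose flags end up exactly 1, i.e. all of whose replacements were inside
   IN_RTX, are cancelled below.  */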
1727 for (i = 0, j = 0; i < n_replacements; i++)
1729 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1730 reload_flags[replacements[i].what] |= 1;
1731 else
1733 replacements[j++] = replacements[i];
1734 reload_flags[replacements[i].what] |= 2;
1737 /* Note that the following store must be done before the recursive calls. */
1738 n_replacements = j;
1740 for (i = n_reloads - 1; i >= 0; i--)
1742 if (reload_flags[i] == 1)
1744 deallocate_reload_reg (i);
1745 remove_address_replacements (rld[i].in);
1746 rld[i].in = 0;
1747 something_changed = 1;
1750 return something_changed;
1753 /* If there is only one output reload, and it is not for an earlyclobber
1754 operand, try to combine it with a (logically unrelated) input reload
1755 to reduce the number of reload registers needed.
1757 This is safe if the input reload does not appear in
1758 the value being output-reloaded, because this implies
1759 it is not needed any more once the original insn completes.
1761 If that doesn't work, see if we can use any of the registers that
1762 die in this insn as a reload register. We can if it is of the right
1763 class and does not appear in the value being output-reloaded. */
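/* A rough, hypothetical illustration: for an insn like
   (set (mem:SI (reg fp)) (plus:SI (reg pseudo1) (reg pseudo2)))
   where pseudo1 needs an input reload and the memory destination needs an
   output reload, a single reload register can be loaded from pseudo1 before
   the insn, serve as the destination, and be stored to memory afterwards,
   provided pseudo1 is not mentioned in the destination (e.g. in its
   address).  */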
1765 static void
1766 combine_reloads (void)
1768 int i, regno;
1769 int output_reload = -1;
1770 int secondary_out = -1;
1771 rtx note;
1773 /* Find the output reload; return unless there is exactly one
1774 and that one is mandatory. */
1776 for (i = 0; i < n_reloads; i++)
1777 if (rld[i].out != 0)
1779 if (output_reload >= 0)
1780 return;
1781 output_reload = i;
1784 if (output_reload < 0 || rld[output_reload].optional)
1785 return;
1787 /* An input-output reload isn't combinable. */
1789 if (rld[output_reload].in != 0)
1790 return;
1792 /* If this reload is for an earlyclobber operand, we can't do anything. */
1793 if (earlyclobber_operand_p (rld[output_reload].out))
1794 return;
1796 /* If there is a reload for part of the address of this operand, we would
1797 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1798 its life to the point where doing this combine would not lower the
1799 number of spill registers needed. */
1800 for (i = 0; i < n_reloads; i++)
1801 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1802 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1803 && rld[i].opnum == rld[output_reload].opnum)
1804 return;
1806 /* Check each input reload; can we combine it? */
1808 for (i = 0; i < n_reloads; i++)
1809 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1810 /* Life span of this reload must not extend past main insn. */
1811 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1812 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1813 && rld[i].when_needed != RELOAD_OTHER
1814 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1815 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1816 [(int) rld[output_reload].outmode])
1817 && rld[i].inc == 0
1818 && rld[i].reg_rtx == 0
1819 #ifdef SECONDARY_MEMORY_NEEDED
1820 /* Don't combine two reloads with different secondary
1821 memory locations. */
1822 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1823 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1824 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1825 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1826 #endif
1827 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1828 ? (rld[i].rclass == rld[output_reload].rclass)
1829 : (reg_class_subset_p (rld[i].rclass,
1830 rld[output_reload].rclass)
1831 || reg_class_subset_p (rld[output_reload].rclass,
1832 rld[i].rclass)))
1833 && (MATCHES (rld[i].in, rld[output_reload].out)
1834 /* Args reversed because the first arg seems to be
1835 the one that we imagine being modified
1836 while the second is the one that might be affected. */
1837 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1838 rld[i].in)
1839 /* However, if the input is a register that appears inside
1840 the output, then we also can't share.
1841 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1842 If the same reload reg is used for both reg 69 and the
1843 result to be stored in memory, then that result
1844 will clobber the address of the memory ref. */
1845 && ! (REG_P (rld[i].in)
1846 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1847 rld[output_reload].out))))
1848 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1849 rld[i].when_needed != RELOAD_FOR_INPUT)
1850 && (reg_class_size[(int) rld[i].rclass]
1851 || targetm.small_register_classes_for_mode_p (VOIDmode))
1852 /* We will allow making things slightly worse by combining an
1853 input and an output, but no worse than that. */
1854 && (rld[i].when_needed == RELOAD_FOR_INPUT
1855 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1857 int j;
1859 /* We have found a reload to combine with! */
1860 rld[i].out = rld[output_reload].out;
1861 rld[i].out_reg = rld[output_reload].out_reg;
1862 rld[i].outmode = rld[output_reload].outmode;
1863 /* Mark the old output reload as inoperative. */
1864 rld[output_reload].out = 0;
1865 /* The combined reload is needed for the entire insn. */
1866 rld[i].when_needed = RELOAD_OTHER;
1867 /* If the output reload had a secondary reload, copy it. */
1868 if (rld[output_reload].secondary_out_reload != -1)
1870 rld[i].secondary_out_reload
1871 = rld[output_reload].secondary_out_reload;
1872 rld[i].secondary_out_icode
1873 = rld[output_reload].secondary_out_icode;
1876 #ifdef SECONDARY_MEMORY_NEEDED
1877 /* Copy any secondary MEM. */
1878 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1879 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1880 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1881 #endif
1882 /* If required, minimize the register class. */
1883 if (reg_class_subset_p (rld[output_reload].rclass,
1884 rld[i].rclass))
1885 rld[i].rclass = rld[output_reload].rclass;
1887 /* Transfer all replacements from the old reload to the combined. */
1888 for (j = 0; j < n_replacements; j++)
1889 if (replacements[j].what == output_reload)
1890 replacements[j].what = i;
1892 return;
1895 /* If this insn has only one operand that is modified or written (assumed
1896 to be the first), it must be the one corresponding to this reload. It
1897 is safe to use anything that dies in this insn for that output provided
1898 that it does not occur in the output (we already know it isn't an
1899 earlyclobber). If this is an asm insn, give up. */
1901 if (INSN_CODE (this_insn) == -1)
1902 return;
1904 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1905 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1906 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1907 return;
1909 /* See if some hard register that dies in this insn and is not used in
1910 the output is the right class. Only works if the register we pick
1911 up can fully hold our output reload. */
1912 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1913 if (REG_NOTE_KIND (note) == REG_DEAD
1914 && REG_P (XEXP (note, 0))
1915 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1916 rld[output_reload].out)
1917 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1918 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1919 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1920 regno)
1921 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1922 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1923 /* Ensure that a secondary or tertiary reload for this output
1924 won't want this register. */
1925 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1926 || (!(TEST_HARD_REG_BIT
1927 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1928 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1929 || !(TEST_HARD_REG_BIT
1930 (reg_class_contents[(int) rld[secondary_out].rclass],
1931 regno)))))
1932 && !fixed_regs[regno]
1933 /* Check that a former pseudo is valid; see find_dummy_reload. */
1934 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1935 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1936 ORIGINAL_REGNO (XEXP (note, 0)))
1937 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1939 rld[output_reload].reg_rtx
1940 = gen_rtx_REG (rld[output_reload].outmode, regno);
1941 return;
1945 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1946 See if one of IN and OUT is a register that may be used;
1947 this is desirable since a spill-register won't be needed.
1948 If so, return the register rtx that proves acceptable.
1950 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1951 RCLASS is the register class required for the reload.
1953 If FOR_REAL is >= 0, it is the number of the reload,
1954 and in some cases when it can be discovered that OUT doesn't need
1955 to be computed, clear out rld[FOR_REAL].out.
1957 If FOR_REAL is -1, this should not be done, because this call
1958 is just to see if a register can be found, not to find and install it.
1960 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1961 puts an additional constraint on being able to use IN for OUT since
1962 IN must not appear elsewhere in the insn (it is assumed that IN itself
1963 is safe from the earlyclobber). */
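/* Sketch of the idea, with hypothetical operands: when OUT is already a
   suitable hard register of class RCLASS and the rest of the insn does not
   reference it, OUT itself can serve as the reload register, so no spill
   register is needed; failing that, IN may be usable when it is a hard
   register of the right class that dies in this insn.  */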
1965 static rtx
1966 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1967 enum machine_mode inmode, enum machine_mode outmode,
1968 reg_class_t rclass, int for_real, int earlyclobber)
1970 rtx in = real_in;
1971 rtx out = real_out;
1972 int in_offset = 0;
1973 int out_offset = 0;
1974 rtx value = 0;
1976 /* If operands exceed a word, we can't use either of them
1977 unless they have the same size. */
1978 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1979 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1980 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1981 return 0;
1983 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1984 respectively refers to a hard register. */
1986 /* Find the inside of any subregs. */
1987 while (GET_CODE (out) == SUBREG)
1989 if (REG_P (SUBREG_REG (out))
1990 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1991 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1992 GET_MODE (SUBREG_REG (out)),
1993 SUBREG_BYTE (out),
1994 GET_MODE (out));
1995 out = SUBREG_REG (out);
1997 while (GET_CODE (in) == SUBREG)
1999 if (REG_P (SUBREG_REG (in))
2000 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2001 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2002 GET_MODE (SUBREG_REG (in)),
2003 SUBREG_BYTE (in),
2004 GET_MODE (in));
2005 in = SUBREG_REG (in);
2008 /* Narrow down the reg class, the same way push_reload will;
2009 otherwise we might find a dummy now, but push_reload won't. */
2011 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2012 if (preferred_class != NO_REGS)
2013 rclass = (enum reg_class) preferred_class;
2016 /* See if OUT will do. */
2017 if (REG_P (out)
2018 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2020 unsigned int regno = REGNO (out) + out_offset;
2021 unsigned int nwords = hard_regno_nregs[regno][outmode];
2022 rtx saved_rtx;
2024 /* When we consider whether the insn uses OUT,
2025 ignore references within IN. They don't prevent us
2026 from copying IN into OUT, because those refs would
2027 move into the insn that reloads IN.
2029 However, we only ignore IN in its role as this reload.
2030 If the insn uses IN elsewhere and it contains OUT,
2031 that counts. We can't be sure it's the "same" operand
2032 so it might not go through this reload.
2034 We also need to avoid using OUT if it, or part of it, is a
2035 fixed register. Modifying such registers, even transiently,
2036 may have undefined effects on the machine, such as modifying
2037 the stack pointer. */
2038 saved_rtx = *inloc;
2039 *inloc = const0_rtx;
2041 if (regno < FIRST_PSEUDO_REGISTER
2042 && HARD_REGNO_MODE_OK (regno, outmode)
2043 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2044 PATTERN (this_insn), outloc))
2046 unsigned int i;
2048 for (i = 0; i < nwords; i++)
2049 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2050 regno + i)
2051 || fixed_regs[regno + i])
2052 break;
2054 if (i == nwords)
2056 if (REG_P (real_out))
2057 value = real_out;
2058 else
2059 value = gen_rtx_REG (outmode, regno);
2063 *inloc = saved_rtx;
2066 /* Consider using IN if OUT was not acceptable
2067 or if OUT dies in this insn (like the quotient in a divmod insn).
2068 We can't use IN unless it dies in this insn,
2069 which means we must know accurately which hard regs are live.
2070 Also, the result can't go in IN if IN is used within OUT,
2071 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2072 if (hard_regs_live_known
2073 && REG_P (in)
2074 && REGNO (in) < FIRST_PSEUDO_REGISTER
2075 && (value == 0
2076 || find_reg_note (this_insn, REG_UNUSED, real_out))
2077 && find_reg_note (this_insn, REG_DEAD, real_in)
2078 && !fixed_regs[REGNO (in)]
2079 && HARD_REGNO_MODE_OK (REGNO (in),
2080 /* The only case where out and real_out might
2081 have different modes is where real_out
2082 is a subreg, and in that case, out
2083 has a real mode. */
2084 (GET_MODE (out) != VOIDmode
2085 ? GET_MODE (out) : outmode))
2086 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2087 /* However only do this if we can be sure that this input
2088 operand doesn't correspond with an uninitialized pseudo.
2089 global can assign some hardreg to it that is the same as
2090 the one assigned to a different, also live pseudo (as it
2091 can ignore the conflict). We must never introduce writes
2092 to such hardregs, as they would clobber the other live
2093 pseudo. See PR 20973. */
2094 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2095 ORIGINAL_REGNO (in))
2096 /* Similarly, only do this if we can be sure that the death
2097 note is still valid. global can assign some hardreg to
2098 the pseudo referenced in the note and simultaneously a
2099 subword of this hardreg to a different, also live pseudo,
2100 because only another subword of the hardreg is actually
2101 used in the insn. This cannot happen if the pseudo has
2102 been assigned exactly one hardreg. See PR 33732. */
2103 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2105 unsigned int regno = REGNO (in) + in_offset;
2106 unsigned int nwords = hard_regno_nregs[regno][inmode];
2108 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2109 && ! hard_reg_set_here_p (regno, regno + nwords,
2110 PATTERN (this_insn))
2111 && (! earlyclobber
2112 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2113 PATTERN (this_insn), inloc)))
2115 unsigned int i;
2117 for (i = 0; i < nwords; i++)
2118 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2119 regno + i))
2120 break;
2122 if (i == nwords)
2124 /* If we were going to use OUT as the reload reg
2125 and changed our mind, it means OUT is a dummy that
2126 dies here. So don't bother copying value to it. */
2127 if (for_real >= 0 && value == real_out)
2128 rld[for_real].out = 0;
2129 if (REG_P (real_in))
2130 value = real_in;
2131 else
2132 value = gen_rtx_REG (inmode, regno);
2137 return value;
2140 /* This page contains subroutines used mainly for determining
2141 whether the IN or an OUT of a reload can serve as the
2142 reload register. */
2144 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2147 earlyclobber_operand_p (rtx x)
2149 int i;
2151 for (i = 0; i < n_earlyclobbers; i++)
2152 if (reload_earlyclobbers[i] == x)
2153 return 1;
2155 return 0;
2158 /* Return 1 if expression X alters a hard reg in the range
2159 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2160 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2161 X should be the body of an instruction. */
2163 static int
2164 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2166 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2168 rtx op0 = SET_DEST (x);
2170 while (GET_CODE (op0) == SUBREG)
2171 op0 = SUBREG_REG (op0);
2172 if (REG_P (op0))
2174 unsigned int r = REGNO (op0);
2176 /* See if this reg overlaps range under consideration. */
2177 if (r < end_regno
2178 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2179 return 1;
2182 else if (GET_CODE (x) == PARALLEL)
2184 int i = XVECLEN (x, 0) - 1;
2186 for (; i >= 0; i--)
2187 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2188 return 1;
2191 return 0;
2194 /* Return 1 if ADDR is a valid memory address for mode MODE
2195 in address space AS, and check that each pseudo reg has the
2196 proper kind of hard reg. */
2199 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2200 rtx addr, addr_space_t as)
2202 #ifdef GO_IF_LEGITIMATE_ADDRESS
2203 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2204 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2205 return 0;
2207 win:
2208 return 1;
2209 #else
2210 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2211 #endif
2214 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2215 if they are the same hard reg, and has special hacks for
2216 autoincrement and autodecrement.
2217 This is specifically intended for find_reloads to use
2218 in determining whether two operands match.
2219 X is the operand whose number is the lower of the two.
2221 The value is 2 if Y contains a pre-increment that matches
2222 a non-incrementing address in X. */
2224 /* ??? To be completely correct, we should arrange to pass
2225 for X the output operand and for Y the input operand.
2226 For now, we assume that the output operand has the lower number
2227 because that is natural in (SET output (... input ...)). */
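/* For example, with hypothetical rtl, matching X = (mem:SI (reg:SI 1))
   against Y = (mem:SI (pre_inc:SI (reg:SI 1))) yields 2, while two
   post-increments never match because the single assembler insn would
   increment only once.  */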
2230 operands_match_p (rtx x, rtx y)
2232 int i;
2233 RTX_CODE code = GET_CODE (x);
2234 const char *fmt;
2235 int success_2;
2237 if (x == y)
2238 return 1;
2239 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2240 && (REG_P (y) || (GET_CODE (y) == SUBREG
2241 && REG_P (SUBREG_REG (y)))))
2243 int j;
2245 if (code == SUBREG)
2247 i = REGNO (SUBREG_REG (x));
2248 if (i >= FIRST_PSEUDO_REGISTER)
2249 goto slow;
2250 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2251 GET_MODE (SUBREG_REG (x)),
2252 SUBREG_BYTE (x),
2253 GET_MODE (x));
2255 else
2256 i = REGNO (x);
2258 if (GET_CODE (y) == SUBREG)
2260 j = REGNO (SUBREG_REG (y));
2261 if (j >= FIRST_PSEUDO_REGISTER)
2262 goto slow;
2263 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2264 GET_MODE (SUBREG_REG (y)),
2265 SUBREG_BYTE (y),
2266 GET_MODE (y));
2268 else
2269 j = REGNO (y);
2271 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2272 multiple hard register group of scalar integer registers, so that
2273 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2274 register. */
2275 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2276 && SCALAR_INT_MODE_P (GET_MODE (x))
2277 && i < FIRST_PSEUDO_REGISTER)
2278 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2279 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2280 && SCALAR_INT_MODE_P (GET_MODE (y))
2281 && j < FIRST_PSEUDO_REGISTER)
2282 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2284 return i == j;
2286 /* If two operands must match, because they are really a single
2287 operand of an assembler insn, then two postincrements are invalid
2288 because the assembler insn would increment only once.
2289 On the other hand, a postincrement matches ordinary indexing
2290 if the postincrement is the output operand. */
2291 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2292 return operands_match_p (XEXP (x, 0), y);
2293 /* Two preincrements are invalid
2294 because the assembler insn would increment only once.
2295 On the other hand, a preincrement matches ordinary indexing
2296 if the preincrement is the input operand.
2297 In this case, return 2, since some callers need to do special
2298 things when this happens. */
2299 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2300 || GET_CODE (y) == PRE_MODIFY)
2301 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2303 slow:
2305 /* Now we have disposed of all the cases in which different rtx codes
2306 can match. */
2307 if (code != GET_CODE (y))
2308 return 0;
2310 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2311 if (GET_MODE (x) != GET_MODE (y))
2312 return 0;
2314 /* MEMs referring to different address space are not equivalent. */
2315 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2316 return 0;
2318 switch (code)
2320 CASE_CONST_UNIQUE:
2321 return 0;
2323 case LABEL_REF:
2324 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2325 case SYMBOL_REF:
2326 return XSTR (x, 0) == XSTR (y, 0);
2328 default:
2329 break;
2332 /* Compare the elements. If any pair of corresponding elements
2333 fail to match, return 0 for the whole thing. */
2335 success_2 = 0;
2336 fmt = GET_RTX_FORMAT (code);
2337 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2339 int val, j;
2340 switch (fmt[i])
2342 case 'w':
2343 if (XWINT (x, i) != XWINT (y, i))
2344 return 0;
2345 break;
2347 case 'i':
2348 if (XINT (x, i) != XINT (y, i))
2349 return 0;
2350 break;
2352 case 'e':
2353 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2354 if (val == 0)
2355 return 0;
2356 /* If any subexpression returns 2,
2357 we should return 2 if we are successful. */
2358 if (val == 2)
2359 success_2 = 1;
2360 break;
2362 case '0':
2363 break;
2365 case 'E':
2366 if (XVECLEN (x, i) != XVECLEN (y, i))
2367 return 0;
2368 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2370 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2371 if (val == 0)
2372 return 0;
2373 if (val == 2)
2374 success_2 = 1;
2376 break;
2378 /* It is believed that rtx's at this level will never
2379 contain anything but integers and other rtx's,
2380 except for within LABEL_REFs and SYMBOL_REFs. */
2381 default:
2382 gcc_unreachable ();
2385 return 1 + success_2;
2388 /* Describe the range of registers or memory referenced by X.
2389 If X is a register, set REG_FLAG and put the first register
2390 number into START and the last plus one into END.
2391 If X is a memory reference, put a base address into BASE
2392 and a range of integer offsets into START and END.
2393 If X is pushing on the stack, we can assume it causes no trouble,
2394 so we set the SAFE field. */
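/* For instance, with hypothetical rtl and a 4-byte SImode,
   (mem:SI (plus:SI (reg:SI fp) (const_int 8))) decomposes into
   BASE = (reg:SI fp), START = 8 and END = 12, while a push such as
   (mem:SI (pre_dec:SI (reg:SI sp))) is marked SAFE, since pushes on the
   stack are assumed to cause no trouble.  */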
2396 static struct decomposition
2397 decompose (rtx x)
2399 struct decomposition val;
2400 int all_const = 0;
2402 memset (&val, 0, sizeof (val));
2404 switch (GET_CODE (x))
2406 case MEM:
2408 rtx base = NULL_RTX, offset = 0;
2409 rtx addr = XEXP (x, 0);
2411 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2412 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2414 val.base = XEXP (addr, 0);
2415 val.start = -GET_MODE_SIZE (GET_MODE (x));
2416 val.end = GET_MODE_SIZE (GET_MODE (x));
2417 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2418 return val;
2421 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2423 if (GET_CODE (XEXP (addr, 1)) == PLUS
2424 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2425 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2427 val.base = XEXP (addr, 0);
2428 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2429 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2430 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2431 return val;
2435 if (GET_CODE (addr) == CONST)
2437 addr = XEXP (addr, 0);
2438 all_const = 1;
2440 if (GET_CODE (addr) == PLUS)
2442 if (CONSTANT_P (XEXP (addr, 0)))
2444 base = XEXP (addr, 1);
2445 offset = XEXP (addr, 0);
2447 else if (CONSTANT_P (XEXP (addr, 1)))
2449 base = XEXP (addr, 0);
2450 offset = XEXP (addr, 1);
2454 if (offset == 0)
2456 base = addr;
2457 offset = const0_rtx;
2459 if (GET_CODE (offset) == CONST)
2460 offset = XEXP (offset, 0);
2461 if (GET_CODE (offset) == PLUS)
2463 if (CONST_INT_P (XEXP (offset, 0)))
2465 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2466 offset = XEXP (offset, 0);
2468 else if (CONST_INT_P (XEXP (offset, 1)))
2470 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2471 offset = XEXP (offset, 1);
2473 else
2475 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2476 offset = const0_rtx;
2479 else if (!CONST_INT_P (offset))
2481 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2482 offset = const0_rtx;
2485 if (all_const && GET_CODE (base) == PLUS)
2486 base = gen_rtx_CONST (GET_MODE (base), base);
2488 gcc_assert (CONST_INT_P (offset));
2490 val.start = INTVAL (offset);
2491 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2492 val.base = base;
2494 break;
2496 case REG:
2497 val.reg_flag = 1;
2498 val.start = true_regnum (x);
2499 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2501 /* A pseudo with no hard reg. */
2502 val.start = REGNO (x);
2503 val.end = val.start + 1;
2505 else
2506 /* A hard reg. */
2507 val.end = end_hard_regno (GET_MODE (x), val.start);
2508 break;
2510 case SUBREG:
2511 if (!REG_P (SUBREG_REG (x)))
2512 /* This could be more precise, but it's good enough. */
2513 return decompose (SUBREG_REG (x));
2514 val.reg_flag = 1;
2515 val.start = true_regnum (x);
2516 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2517 return decompose (SUBREG_REG (x));
2518 else
2519 /* A hard reg. */
2520 val.end = val.start + subreg_nregs (x);
2521 break;
2523 case SCRATCH:
2524 /* This hasn't been assigned yet, so it can't conflict yet. */
2525 val.safe = 1;
2526 break;
2528 default:
2529 gcc_assert (CONSTANT_P (x));
2530 val.safe = 1;
2531 break;
2533 return val;
2536 /* Return 1 if altering Y will not modify the value of X.
2537 Y is also described by YDATA, which should be decompose (Y). */
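/* For example, two 4-byte stack slots such as
   (mem:SI (plus (reg sp) (const_int 4))) and
   (mem:SI (plus (reg sp) (const_int 8))) share the same base but cover the
   disjoint offset ranges [4,8) and [8,12), so each is immune to a store
   into the other.  */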
2539 static int
2540 immune_p (rtx x, rtx y, struct decomposition ydata)
2542 struct decomposition xdata;
2544 if (ydata.reg_flag)
2545 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2546 if (ydata.safe)
2547 return 1;
2549 gcc_assert (MEM_P (y));
2550 /* If Y is memory and X is not, Y can't affect X. */
2551 if (!MEM_P (x))
2552 return 1;
2554 xdata = decompose (x);
2556 if (! rtx_equal_p (xdata.base, ydata.base))
2558 /* If bases are distinct symbolic constants, there is no overlap. */
2559 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2560 return 1;
2561 /* Constants and stack slots never overlap. */
2562 if (CONSTANT_P (xdata.base)
2563 && (ydata.base == frame_pointer_rtx
2564 || ydata.base == hard_frame_pointer_rtx
2565 || ydata.base == stack_pointer_rtx))
2566 return 1;
2567 if (CONSTANT_P (ydata.base)
2568 && (xdata.base == frame_pointer_rtx
2569 || xdata.base == hard_frame_pointer_rtx
2570 || xdata.base == stack_pointer_rtx))
2571 return 1;
2572 /* If either base is variable, we don't know anything. */
2573 return 0;
2576 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2579 /* Similar, but calls decompose. */
2582 safe_from_earlyclobber (rtx op, rtx clobber)
2584 struct decomposition early_data;
2586 early_data = decompose (clobber);
2587 return immune_p (op, clobber, early_data);
2590 /* Main entry point of this file: search the body of INSN
2591 for values that need reloading and record them with push_reload.
2592 REPLACE nonzero means record also where the values occur
2593 so that subst_reloads can be used.
2595 IND_LEVELS says how many levels of indirection are supported by this
2596 machine; a value of zero means that a memory reference is not a valid
2597 memory address.
2599 LIVE_KNOWN says we have valid information about which hard
2600 regs are live at each point in the program; this is true when
2601 we are called from global_alloc but false when stupid register
2602 allocation has been done.
2604 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2605 an element is nonnegative if that reg has been commandeered for reloading into.
2606 It is copied into STATIC_RELOAD_REG_P and referenced from there
2607 by various subroutines.
2609 Return TRUE if some operands need to be changed, because of swapping
2610 commutative operands, reg_equiv_address substitution, or whatever. */
2613 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2614 short *reload_reg_p)
2616 int insn_code_number;
2617 int i, j;
2618 int noperands;
2619 /* These start out as the constraints for the insn
2620 and they are chewed up as we consider alternatives. */
2621 const char *constraints[MAX_RECOG_OPERANDS];
2622 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2623 a register. */
2624 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2625 char pref_or_nothing[MAX_RECOG_OPERANDS];
2626 /* Nonzero for a MEM operand whose entire address needs a reload.
2627 May be -1 to indicate the entire address may or may not need a reload. */
2628 int address_reloaded[MAX_RECOG_OPERANDS];
2629 /* Nonzero for an address operand that needs to be completely reloaded.
2630 May be -1 to indicate the entire operand may or may not need a reload. */
2631 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2632 /* Value of enum reload_type to use for operand. */
2633 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2634 /* Value of enum reload_type to use within address of operand. */
2635 enum reload_type address_type[MAX_RECOG_OPERANDS];
2636 /* Save the usage of each operand. */
2637 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2638 int no_input_reloads = 0, no_output_reloads = 0;
2639 int n_alternatives;
2640 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2641 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2642 char this_alternative_win[MAX_RECOG_OPERANDS];
2643 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2644 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2645 int this_alternative_matches[MAX_RECOG_OPERANDS];
2646 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2647 int this_alternative_number;
2648 int goal_alternative_number = 0;
2649 int operand_reloadnum[MAX_RECOG_OPERANDS];
2650 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2651 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2652 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2653 char goal_alternative_win[MAX_RECOG_OPERANDS];
2654 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2655 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2656 int goal_alternative_swapped;
2657 int best;
2658 int commutative;
2659 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2660 rtx substed_operand[MAX_RECOG_OPERANDS];
2661 rtx body = PATTERN (insn);
2662 rtx set = single_set (insn);
2663 int goal_earlyclobber = 0, this_earlyclobber;
2664 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2665 int retval = 0;
2667 this_insn = insn;
2668 n_reloads = 0;
2669 n_replacements = 0;
2670 n_earlyclobbers = 0;
2671 replace_reloads = replace;
2672 hard_regs_live_known = live_known;
2673 static_reload_reg_p = reload_reg_p;
2675 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2676 neither are insns that SET cc0. Insns that use CC0 are not allowed
2677 to have any input reloads. */
2678 if (JUMP_P (insn) || CALL_P (insn))
2679 no_output_reloads = 1;
2681 #ifdef HAVE_cc0
2682 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2683 no_input_reloads = 1;
2684 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2685 no_output_reloads = 1;
2686 #endif
2688 #ifdef SECONDARY_MEMORY_NEEDED
2689 /* The eliminated forms of any secondary memory locations are per-insn, so
2690 clear them out here. */
2692 if (secondary_memlocs_elim_used)
2694 memset (secondary_memlocs_elim, 0,
2695 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2696 secondary_memlocs_elim_used = 0;
2698 #endif
2700 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2701 is cheap to move between them. If it is not, there may not be an insn
2702 to do the copy, so we may need a reload. */
2703 if (GET_CODE (body) == SET
2704 && REG_P (SET_DEST (body))
2705 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2706 && REG_P (SET_SRC (body))
2707 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2708 && register_move_cost (GET_MODE (SET_SRC (body)),
2709 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2710 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2711 return 0;
2713 extract_insn (insn);
2715 noperands = reload_n_operands = recog_data.n_operands;
2716 n_alternatives = recog_data.n_alternatives;
2718 /* Just return "no reloads" if insn has no operands with constraints. */
2719 if (noperands == 0 || n_alternatives == 0)
2720 return 0;
2722 insn_code_number = INSN_CODE (insn);
2723 this_insn_is_asm = insn_code_number < 0;
2725 memcpy (operand_mode, recog_data.operand_mode,
2726 noperands * sizeof (enum machine_mode));
2727 memcpy (constraints, recog_data.constraints,
2728 noperands * sizeof (const char *));
2730 commutative = -1;
2732 /* If we will need to know, later, whether some pair of operands
2733 are the same, we must compare them now and save the result.
2734 Reloading the base and index registers will clobber them
2735 and afterward they will fail to match. */
2737 for (i = 0; i < noperands; i++)
2739 const char *p;
2740 int c;
2741 char *end;
2743 substed_operand[i] = recog_data.operand[i];
2744 p = constraints[i];
2746 modified[i] = RELOAD_READ;
2748 /* Scan this operand's constraint to see if it is an output operand,
2749 an in-out operand, is commutative, or should match another. */
2751 while ((c = *p))
2753 p += CONSTRAINT_LEN (c, p);
2754 switch (c)
2756 case '=':
2757 modified[i] = RELOAD_WRITE;
2758 break;
2759 case '+':
2760 modified[i] = RELOAD_READ_WRITE;
2761 break;
2762 case '%':
2764 /* The last operand should not be marked commutative. */
2765 gcc_assert (i != noperands - 1);
2767 /* We currently only support one commutative pair of
2768 operands. Some existing asm code currently uses more
2769 than one pair. Previously, that would usually work,
2770 but sometimes it would crash the compiler. We
2771 continue supporting that case as well as we can by
2772 silently ignoring all but the first pair. In the
2773 future we may handle it correctly. */
2774 if (commutative < 0)
2775 commutative = i;
2776 else
2777 gcc_assert (this_insn_is_asm);
2779 break;
2780 /* Use of ISDIGIT is tempting here, but it may get expensive because
2781 of locale support we don't want. */
2782 case '0': case '1': case '2': case '3': case '4':
2783 case '5': case '6': case '7': case '8': case '9':
2785 c = strtoul (p - 1, &end, 10);
2786 p = end;
2788 operands_match[c][i]
2789 = operands_match_p (recog_data.operand[c],
2790 recog_data.operand[i]);
2792 /* An operand may not match itself. */
2793 gcc_assert (c != i);
2795 /* If C can be commuted with C+1, and C might need to match I,
2796 then C+1 might also need to match I. */
2797 if (commutative >= 0)
2799 if (c == commutative || c == commutative + 1)
2801 int other = c + (c == commutative ? 1 : -1);
2802 operands_match[other][i]
2803 = operands_match_p (recog_data.operand[other],
2804 recog_data.operand[i]);
2806 if (i == commutative || i == commutative + 1)
2808 int other = i + (i == commutative ? 1 : -1);
2809 operands_match[c][other]
2810 = operands_match_p (recog_data.operand[c],
2811 recog_data.operand[other]);
2813 /* Note that C is supposed to be less than I.
2814 No need to consider altering both C and I because in
2815 that case we would alter one into the other. */
2822 /* Examine each operand that is a memory reference or memory address
2823 and reload parts of the addresses into index registers.
2824 Also here any references to pseudo regs that didn't get hard regs
2825 but are equivalent to constants get replaced in the insn itself
2826 with those constants. Nobody will ever see them again.
2828 Finally, set up the preferred classes of each operand. */
2830 for (i = 0; i < noperands; i++)
2832 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2834 address_reloaded[i] = 0;
2835 address_operand_reloaded[i] = 0;
2836 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2837 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2838 : RELOAD_OTHER);
2839 address_type[i]
2840 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2841 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2842 : RELOAD_OTHER);
2844 if (*constraints[i] == 0)
2845 /* Ignore things like match_operator operands. */
2847 else if (insn_extra_address_constraint
2848 (lookup_constraint (constraints[i])))
2850 address_operand_reloaded[i]
2851 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2852 recog_data.operand[i],
2853 recog_data.operand_loc[i],
2854 i, operand_type[i], ind_levels, insn);
2856 /* If we now have a simple operand where we used to have a
2857 PLUS or MULT, re-recognize and try again. */
2858 if ((OBJECT_P (*recog_data.operand_loc[i])
2859 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2860 && (GET_CODE (recog_data.operand[i]) == MULT
2861 || GET_CODE (recog_data.operand[i]) == PLUS))
2863 INSN_CODE (insn) = -1;
2864 retval = find_reloads (insn, replace, ind_levels, live_known,
2865 reload_reg_p);
2866 return retval;
2869 recog_data.operand[i] = *recog_data.operand_loc[i];
2870 substed_operand[i] = recog_data.operand[i];
2872 /* Address operands are reloaded in their existing mode,
2873 no matter what is specified in the machine description. */
2874 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2876 /* If the address is a single CONST_INT, pick the address mode
2877 instead; otherwise we will later not know in which mode
2878 the reload should be performed. */
2879 if (operand_mode[i] == VOIDmode)
2880 operand_mode[i] = Pmode;
2883 else if (code == MEM)
2885 address_reloaded[i]
2886 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2887 recog_data.operand_loc[i],
2888 XEXP (recog_data.operand[i], 0),
2889 &XEXP (recog_data.operand[i], 0),
2890 i, address_type[i], ind_levels, insn);
2891 recog_data.operand[i] = *recog_data.operand_loc[i];
2892 substed_operand[i] = recog_data.operand[i];
2894 else if (code == SUBREG)
2896 rtx reg = SUBREG_REG (recog_data.operand[i]);
2897 rtx op
2898 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2899 ind_levels,
2900 set != 0
2901 && &SET_DEST (set) == recog_data.operand_loc[i],
2902 insn,
2903 &address_reloaded[i]);
2905 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2906 that didn't get a hard register, emit a USE with a REG_EQUAL
2907 note in front so that we might inherit a previous, possibly
2908 wider reload. */
2910 if (replace
2911 && MEM_P (op)
2912 && REG_P (reg)
2913 && (GET_MODE_SIZE (GET_MODE (reg))
2914 >= GET_MODE_SIZE (GET_MODE (op)))
2915 && reg_equiv_constant (REGNO (reg)) == 0)
2916 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2917 insn),
2918 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2920 substed_operand[i] = recog_data.operand[i] = op;
2922 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2923 /* We can get a PLUS as an "operand" as a result of register
2924 elimination. See eliminate_regs and gen_reload. We handle
2925 a unary operator by reloading the operand. */
2926 substed_operand[i] = recog_data.operand[i]
2927 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2928 ind_levels, 0, insn,
2929 &address_reloaded[i]);
2930 else if (code == REG)
2932 /* This is equivalent to calling find_reloads_toplev.
2933 The code is duplicated for speed.
2934 When we find a pseudo always equivalent to a constant,
2935 we replace it by the constant. We must be sure, however,
2936 that we don't try to replace it in the insn in which it
2937 is being set. */
2938 int regno = REGNO (recog_data.operand[i]);
2939 if (reg_equiv_constant (regno) != 0
2940 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2942 /* Record the existing mode so that the check whether constants are
2943 allowed will work when operand_mode isn't specified. */
2945 if (operand_mode[i] == VOIDmode)
2946 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2948 substed_operand[i] = recog_data.operand[i]
2949 = reg_equiv_constant (regno);
2951 if (reg_equiv_memory_loc (regno) != 0
2952 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2953 /* We need not give a valid is_set_dest argument since the case
2954 of a constant equivalence was checked above. */
2955 substed_operand[i] = recog_data.operand[i]
2956 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2957 ind_levels, 0, insn,
2958 &address_reloaded[i]);
2960 /* If the operand is still a register (we didn't replace it with an
2961 equivalent), get the preferred class to reload it into. */
2962 code = GET_CODE (recog_data.operand[i]);
2963 preferred_class[i]
2964 = ((code == REG && REGNO (recog_data.operand[i])
2965 >= FIRST_PSEUDO_REGISTER)
2966 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2967 : NO_REGS);
2968 pref_or_nothing[i]
2969 = (code == REG
2970 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2971 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2974 /* If this is simply a copy from operand 1 to operand 0, merge the
2975 preferred classes for the operands. */
2976 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2977 && recog_data.operand[1] == SET_SRC (set))
2979 preferred_class[0] = preferred_class[1]
2980 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2981 pref_or_nothing[0] |= pref_or_nothing[1];
2982 pref_or_nothing[1] |= pref_or_nothing[0];
2985 /* Now see what we need for pseudo-regs that didn't get hard regs
2986 or got the wrong kind of hard reg. For this, we must consider
2987 all the operands together against the register constraints. */
2989 best = MAX_RECOG_OPERANDS * 2 + 600;
2991 goal_alternative_swapped = 0;
2993 /* The constraints are made of several alternatives.
2994 Each operand's constraint looks like foo,bar,... with commas
2995 separating the alternatives. The first alternatives for all
2996 operands go together, the second alternatives go together, etc.
2998 First loop over alternatives. */
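/* For illustration, with hypothetical constraints: if one operand's
   constraint is "r,m" and another's is "m,r", the first column pairs
   register with memory and the second pairs memory with register; the loop
   below scores each such column (counting reloads needed and any '?'
   disparagement) and remembers the best one as the goal alternative.  */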
3000 for (this_alternative_number = 0;
3001 this_alternative_number < n_alternatives;
3002 this_alternative_number++)
3004 int swapped;
3006 if (!TEST_BIT (recog_data.enabled_alternatives, this_alternative_number))
3008 int i;
3010 for (i = 0; i < recog_data.n_operands; i++)
3011 constraints[i] = skip_alternative (constraints[i]);
3013 continue;
3016 /* If insn is commutative (it's safe to exchange a certain pair
3017 of operands) then we need to try each alternative twice, the
3018 second time matching those two operands as if we had
3019 exchanged them. To do this, really exchange them in
3020 operands. */
3021 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3023 /* Loop over operands for one constraint alternative. */
3024 /* LOSERS counts those that don't fit this alternative
3025 and would require loading. */
3026 int losers = 0;
3027 /* BAD is set to 1 if some operand can't fit this alternative
3028 even after reloading. */
3029 int bad = 0;
3030 /* REJECT is a count of how undesirable this alternative says it is
3031 if any reloading is required. If the alternative matches exactly
3032 then REJECT is ignored, but otherwise it gets this much
3033 counted against it in addition to the reloading needed. Each
3034 ? counts three times here since we want the disparaging caused by
3035 a bad register class to only count 1/3 as much. */
3036 int reject = 0;
3038 if (swapped)
3040 enum reg_class tclass;
3041 int t;
3043 recog_data.operand[commutative] = substed_operand[commutative + 1];
3044 recog_data.operand[commutative + 1] = substed_operand[commutative];
3045 /* Swap the duplicates too. */
3046 for (i = 0; i < recog_data.n_dups; i++)
3047 if (recog_data.dup_num[i] == commutative
3048 || recog_data.dup_num[i] == commutative + 1)
3049 *recog_data.dup_loc[i]
3050 = recog_data.operand[(int) recog_data.dup_num[i]];
3052 tclass = preferred_class[commutative];
3053 preferred_class[commutative] = preferred_class[commutative + 1];
3054 preferred_class[commutative + 1] = tclass;
3056 t = pref_or_nothing[commutative];
3057 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3058 pref_or_nothing[commutative + 1] = t;
3060 t = address_reloaded[commutative];
3061 address_reloaded[commutative] = address_reloaded[commutative + 1];
3062 address_reloaded[commutative + 1] = t;
3065 this_earlyclobber = 0;
3067 for (i = 0; i < noperands; i++)
3069 const char *p = constraints[i];
3070 char *end;
3071 int len;
3072 int win = 0;
3073 int did_match = 0;
3074 /* 0 => this operand can be reloaded somehow for this alternative. */
3075 int badop = 1;
3076 /* 0 => this operand can be reloaded if the alternative allows regs. */
3077 int winreg = 0;
3078 int c;
3079 int m;
3080 rtx operand = recog_data.operand[i];
3081 int offset = 0;
3082 /* Nonzero means this is a MEM that must be reloaded into a reg
3083 regardless of what the constraint says. */
3084 int force_reload = 0;
3085 int offmemok = 0;
3086 /* Nonzero if a constant forced into memory would be OK for this
3087 operand. */
3088 int constmemok = 0;
3089 int earlyclobber = 0;
3090 enum constraint_num cn;
3091 enum reg_class cl;
3093 /* If the predicate accepts a unary operator, it means that
3094 we need to reload the operand, but do not do this for
3095 match_operator and friends. */
3096 if (UNARY_P (operand) && *p != 0)
3097 operand = XEXP (operand, 0);
3099 /* If the operand is a SUBREG, extract
3100 the REG or MEM (or maybe even a constant) within.
3101 (Constants can occur as a result of reg_equiv_constant.) */
3103 while (GET_CODE (operand) == SUBREG)
3105 /* Offset only matters when operand is a REG and
3106 it is a hard reg. This is because it is passed
3107 to reg_fits_class_p if it is a REG and all pseudos
3108 return 0 from that function. */
3109 if (REG_P (SUBREG_REG (operand))
3110 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3112 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3113 GET_MODE (SUBREG_REG (operand)),
3114 SUBREG_BYTE (operand),
3115 GET_MODE (operand)) < 0)
3116 force_reload = 1;
3117 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3118 GET_MODE (SUBREG_REG (operand)),
3119 SUBREG_BYTE (operand),
3120 GET_MODE (operand));
3122 operand = SUBREG_REG (operand);
3123 /* Force reload if this is a constant or PLUS or if there may
3124 be a problem accessing OPERAND in the outer mode. */
3125 if (CONSTANT_P (operand)
3126 || GET_CODE (operand) == PLUS
3127 /* We must force a reload of paradoxical SUBREGs
3128 of a MEM because the alignment of the inner value
3129 may not be enough to do the outer reference. On
3130 big-endian machines, it may also reference outside
3131 the object.
3133 On machines that extend byte operations and we have a
3134 SUBREG where both the inner and outer modes are no wider
3135 than a word and the inner mode is narrower, is integral,
3136 and gets extended when loaded from memory, combine.c has
3137 made assumptions about the behavior of the machine in such
3138 register access. If the data is, in fact, in memory we
3139 must always load using the size assumed to be in the
3140 register and let the insn do the different-sized
3141 accesses.
3143 This is doubly true if WORD_REGISTER_OPERATIONS. In
3144 this case eliminate_regs has left non-paradoxical
3145 subregs for push_reload to see. Make sure it does
3146 by forcing the reload.
3148 ??? When is it right at this stage to have a subreg
3149 of a mem that is _not_ to be handled specially? IMO
3150 those should have been reduced to just a mem. */
3151 || ((MEM_P (operand)
3152 || (REG_P (operand)
3153 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3154 #ifndef WORD_REGISTER_OPERATIONS
3155 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3156 < BIGGEST_ALIGNMENT)
3157 && (GET_MODE_SIZE (operand_mode[i])
3158 > GET_MODE_SIZE (GET_MODE (operand))))
3159 || BYTES_BIG_ENDIAN
3160 #ifdef LOAD_EXTEND_OP
3161 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3162 && (GET_MODE_SIZE (GET_MODE (operand))
3163 <= UNITS_PER_WORD)
3164 && (GET_MODE_SIZE (operand_mode[i])
3165 > GET_MODE_SIZE (GET_MODE (operand)))
3166 && INTEGRAL_MODE_P (GET_MODE (operand))
3167 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3168 #endif
3170 #endif
3173 force_reload = 1;
3176 this_alternative[i] = NO_REGS;
3177 this_alternative_win[i] = 0;
3178 this_alternative_match_win[i] = 0;
3179 this_alternative_offmemok[i] = 0;
3180 this_alternative_earlyclobber[i] = 0;
3181 this_alternative_matches[i] = -1;
3183 /* An empty constraint or empty alternative
3184 allows anything which matched the pattern. */
3185 if (*p == 0 || *p == ',')
3186 win = 1, badop = 0;
3188 /* Scan this alternative's specs for this operand;
3189 set WIN if the operand fits any letter in this alternative.
3190 Otherwise, clear BADOP if this operand could
3191 fit some letter after reloads,
3192 or set WINREG if this operand could fit after reloads
3193 provided the constraint allows some registers. */
3196 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3198 case '\0':
3199 len = 0;
3200 break;
3201 case ',':
3202 c = '\0';
3203 break;
3205 case '?':
3206 reject += 6;
3207 break;
3209 case '!':
3210 reject = 600;
3211 break;
3213 case '#':
3214 /* Ignore rest of this alternative as far as
3215 reloading is concerned. */
3217 p++;
3218 while (*p && *p != ',');
3219 len = 0;
3220 break;
3222 case '0': case '1': case '2': case '3': case '4':
3223 case '5': case '6': case '7': case '8': case '9':
3224 m = strtoul (p, &end, 10);
3225 p = end;
3226 len = 0;
3228 this_alternative_matches[i] = m;
3229 /* We are supposed to match a previous operand.
3230 If we do, we win if that one did.
3231 If we do not, count both of the operands as losers.
3232 (This is too conservative, since most of the time
3233 only a single reload insn will be needed to make
3234 the two operands win. As a result, this alternative
3235 may be rejected when it is actually desirable.) */
3236 if ((swapped && (m != commutative || i != commutative + 1))
3237 /* If we are matching as if two operands were swapped,
3238 also pretend that operands_match had been computed
3239 with swapped.
3240 But if I is the second of those and M is the first,
3241 don't exchange them, because operands_match is valid
3242 only on one side of its diagonal. */
3243 ? (operands_match
3244 [(m == commutative || m == commutative + 1)
3245 ? 2 * commutative + 1 - m : m]
3246 [(i == commutative || i == commutative + 1)
3247 ? 2 * commutative + 1 - i : i])
3248 : operands_match[m][i])
3250 /* If we are matching a non-offsettable address where an
3251 offsettable address was expected, then we must reject
3252 this combination, because we can't reload it. */
3253 if (this_alternative_offmemok[m]
3254 && MEM_P (recog_data.operand[m])
3255 && this_alternative[m] == NO_REGS
3256 && ! this_alternative_win[m])
3257 bad = 1;
3259 did_match = this_alternative_win[m];
3261 else
3263 /* Operands don't match. */
3264 rtx value;
3265 int loc1, loc2;
3266 /* Retroactively mark the operand we had to match
3267 as a loser, if it wasn't already. */
3268 if (this_alternative_win[m])
3269 losers++;
3270 this_alternative_win[m] = 0;
3271 if (this_alternative[m] == NO_REGS)
3272 bad = 1;
3273 /* But count the pair only once in the total badness of
3274 this alternative, if the pair can be a dummy reload.
3275 The pointers in operand_loc are not swapped; swap
3276 them by hand if necessary. */
3277 if (swapped && i == commutative)
3278 loc1 = commutative + 1;
3279 else if (swapped && i == commutative + 1)
3280 loc1 = commutative;
3281 else
3282 loc1 = i;
3283 if (swapped && m == commutative)
3284 loc2 = commutative + 1;
3285 else if (swapped && m == commutative + 1)
3286 loc2 = commutative;
3287 else
3288 loc2 = m;
3289 value
3290 = find_dummy_reload (recog_data.operand[i],
3291 recog_data.operand[m],
3292 recog_data.operand_loc[loc1],
3293 recog_data.operand_loc[loc2],
3294 operand_mode[i], operand_mode[m],
3295 this_alternative[m], -1,
3296 this_alternative_earlyclobber[m]);
3298 if (value != 0)
3299 losers--;
3301 /* This can be fixed with reloads if the operand
3302 we are supposed to match can be fixed with reloads. */
3303 badop = 0;
3304 this_alternative[i] = this_alternative[m];
3306 /* If we have to reload this operand and some previous
3307 operand also had to match the same thing as this
3308 operand, we don't know how to do that. So reject this
3309 alternative. */
3310 if (! did_match || force_reload)
3311 for (j = 0; j < i; j++)
3312 if (this_alternative_matches[j]
3313 == this_alternative_matches[i])
3315 badop = 1;
3316 break;
3318 break;
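/* A worked example (hypothetical operand numbers): with commutative == 1,
   SWAPPED set, and operand 2 carrying the constraint "0" (so i == 2 and
   m == 0), m is left unchanged but i is mapped to 2*1 + 1 - 2 == 1, and we
   consult operands_match[0][1]: under the swap, operand 2 stands in for
   operand 1, and only that side of the table's diagonal is valid.  */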
3320 case 'p':
3321 /* All necessary reloads for an address_operand
3322 were handled in find_reloads_address. */
3323 this_alternative[i]
3324 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3325 ADDRESS, SCRATCH);
3326 win = 1;
3327 badop = 0;
3328 break;
3330 case TARGET_MEM_CONSTRAINT:
3331 if (force_reload)
3332 break;
3333 if (MEM_P (operand)
3334 || (REG_P (operand)
3335 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3336 && reg_renumber[REGNO (operand)] < 0))
3337 win = 1;
3338 if (CONST_POOL_OK_P (operand_mode[i], operand))
3339 badop = 0;
3340 constmemok = 1;
3341 break;
3343 case '<':
3344 if (MEM_P (operand)
3345 && ! address_reloaded[i]
3346 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3347 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3348 win = 1;
3349 break;
3351 case '>':
3352 if (MEM_P (operand)
3353 && ! address_reloaded[i]
3354 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3355 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3356 win = 1;
3357 break;
3359 /* Memory operand whose address is not offsettable. */
3360 case 'V':
3361 if (force_reload)
3362 break;
3363 if (MEM_P (operand)
3364 && ! (ind_levels ? offsettable_memref_p (operand)
3365 : offsettable_nonstrict_memref_p (operand))
3366 /* Certain mem addresses will become offsettable
3367 after they themselves are reloaded. This is important;
3368 we don't want our own handling of unoffsettables
3369 to override the handling of reg_equiv_address. */
3370 && !(REG_P (XEXP (operand, 0))
3371 && (ind_levels == 0
3372 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3373 win = 1;
3374 break;
3376 /* Memory operand whose address is offsettable. */
3377 case 'o':
3378 if (force_reload)
3379 break;
3380 if ((MEM_P (operand)
3381 /* If IND_LEVELS, find_reloads_address won't reload a
3382 pseudo that didn't get a hard reg, so we have to
3383 reject that case. */
3384 && ((ind_levels ? offsettable_memref_p (operand)
3385 : offsettable_nonstrict_memref_p (operand))
3386 /* A reloaded address is offsettable because it is now
3387 just a simple register indirect. */
3388 || address_reloaded[i] == 1))
3389 || (REG_P (operand)
3390 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3391 && reg_renumber[REGNO (operand)] < 0
3392 /* If reg_equiv_address is nonzero, we will be
3393 loading it into a register; hence it will be
3394 offsettable, but we cannot say that reg_equiv_mem
3395 is offsettable without checking. */
3396 && ((reg_equiv_mem (REGNO (operand)) != 0
3397 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3398 || (reg_equiv_address (REGNO (operand)) != 0))))
3399 win = 1;
3400 if (CONST_POOL_OK_P (operand_mode[i], operand)
3401 || MEM_P (operand))
3402 badop = 0;
3403 constmemok = 1;
3404 offmemok = 1;
3405 break;
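/* Concretely (target-dependent, for illustration only): an address such as
   (mem:SI (plus:SI (reg:SI 100) (const_int 8))) is normally offsettable and
   so satisfies 'o', while (mem:SI (post_inc:SI (reg:SI 100))) is not and
   satisfies only 'V'.  A MEM whose address we have already reloaded counts
   as offsettable because it has become a plain register indirect.  */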
3407 case '&':
3408 /* Output operand that is stored before the need for the
3409 input operands (and their index registers) is over. */
3410 earlyclobber = 1, this_earlyclobber = 1;
3411 break;
3413 case 'X':
3414 force_reload = 0;
3415 win = 1;
3416 break;
3418 case 'g':
3419 if (! force_reload
3420 /* A PLUS is never a valid operand, but reload can make
3421 it from a register when eliminating registers. */
3422 && GET_CODE (operand) != PLUS
3423 /* A SCRATCH is not a valid operand. */
3424 && GET_CODE (operand) != SCRATCH
3425 && (! CONSTANT_P (operand)
3426 || ! flag_pic
3427 || LEGITIMATE_PIC_OPERAND_P (operand))
3428 && (GENERAL_REGS == ALL_REGS
3429 || !REG_P (operand)
3430 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3431 && reg_renumber[REGNO (operand)] < 0)))
3432 win = 1;
3433 cl = GENERAL_REGS;
3434 goto reg;
3436 default:
3437 cn = lookup_constraint (p);
3438 switch (get_constraint_type (cn))
3440 case CT_REGISTER:
3441 cl = reg_class_for_constraint (cn);
3442 if (cl != NO_REGS)
3443 goto reg;
3444 break;
3446 case CT_CONST_INT:
3447 if (CONST_INT_P (operand)
3448 && (insn_const_int_ok_for_constraint
3449 (INTVAL (operand), cn)))
3450 win = true;
3451 break;
3453 case CT_MEMORY:
3454 if (force_reload)
3455 break;
3456 if (constraint_satisfied_p (operand, cn))
3457 win = 1;
3458 /* If the address was already reloaded,
3459 we win as well. */
3460 else if (MEM_P (operand) && address_reloaded[i] == 1)
3461 win = 1;
3462 /* Likewise if the address will be reloaded because
3463 reg_equiv_address is nonzero. For reg_equiv_mem
3464 we have to check. */
3465 else if (REG_P (operand)
3466 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3467 && reg_renumber[REGNO (operand)] < 0
3468 && ((reg_equiv_mem (REGNO (operand)) != 0
3469 && (constraint_satisfied_p
3470 (reg_equiv_mem (REGNO (operand)),
3471 cn)))
3472 || (reg_equiv_address (REGNO (operand))
3473 != 0)))
3474 win = 1;
3476 /* If we didn't already win, we can reload
3477 constants via force_const_mem, and other
3478 MEMs by reloading the address like for 'o'. */
3479 if (CONST_POOL_OK_P (operand_mode[i], operand)
3480 || MEM_P (operand))
3481 badop = 0;
3482 constmemok = 1;
3483 offmemok = 1;
3484 break;
3486 case CT_ADDRESS:
3487 if (constraint_satisfied_p (operand, cn))
3488 win = 1;
3490 /* If we didn't already win, we can reload
3491 the address into a base register. */
3492 this_alternative[i]
3493 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3494 ADDRESS, SCRATCH);
3495 badop = 0;
3496 break;
3498 case CT_FIXED_FORM:
3499 if (constraint_satisfied_p (operand, cn))
3500 win = 1;
3501 break;
3503 break;
3505 reg:
3506 this_alternative[i]
3507 = reg_class_subunion[this_alternative[i]][cl];
3508 if (GET_MODE (operand) == BLKmode)
3509 break;
3510 winreg = 1;
3511 if (REG_P (operand)
3512 && reg_fits_class_p (operand, this_alternative[i],
3513 offset, GET_MODE (recog_data.operand[i])))
3514 win = 1;
3515 break;
3517 while ((p += len), c);
3519 if (swapped == (commutative >= 0 ? 1 : 0))
3520 constraints[i] = p;
3522 /* If this operand could be handled with a reg,
3523 and some reg is allowed, then this operand can be handled. */
3524 if (winreg && this_alternative[i] != NO_REGS
3525 && (win || !class_only_fixed_regs[this_alternative[i]]))
3526 badop = 0;
3528 /* Record which operands fit this alternative. */
3529 this_alternative_earlyclobber[i] = earlyclobber;
3530 if (win && ! force_reload)
3531 this_alternative_win[i] = 1;
3532 else if (did_match && ! force_reload)
3533 this_alternative_match_win[i] = 1;
3534 else
3536 int const_to_mem = 0;
3538 this_alternative_offmemok[i] = offmemok;
3539 losers++;
3540 if (badop)
3541 bad = 1;
3542 /* Alternative loses if it has no regs for a reg operand. */
3543 if (REG_P (operand)
3544 && this_alternative[i] == NO_REGS
3545 && this_alternative_matches[i] < 0)
3546 bad = 1;
3548 /* If this is a constant that is reloaded into the desired
3549 class by copying it to memory first, count that as another
3550 reload. This is consistent with other code and is
3551 required to avoid choosing another alternative when
3552 the constant is moved into memory by this function on
3553 an early reload pass. Note that the test here is
3554 precisely the same as in the code below that calls
3555 force_const_mem. */
3556 if (CONST_POOL_OK_P (operand_mode[i], operand)
3557 && ((targetm.preferred_reload_class (operand,
3558 this_alternative[i])
3559 == NO_REGS)
3560 || no_input_reloads))
3562 const_to_mem = 1;
3563 if (this_alternative[i] != NO_REGS)
3564 losers++;
3567 /* Alternative loses if it requires a type of reload not
3568 permitted for this insn. We can always reload SCRATCH
3569 and objects with a REG_UNUSED note. */
3570 if (GET_CODE (operand) != SCRATCH
3571 && modified[i] != RELOAD_READ && no_output_reloads
3572 && ! find_reg_note (insn, REG_UNUSED, operand))
3573 bad = 1;
3574 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3575 && ! const_to_mem)
3576 bad = 1;
3578 /* If we can't reload this value at all, reject this
3579 alternative. Note that we could also lose due to
3580 LIMIT_RELOAD_CLASS, but we don't check that
3581 here. */
3583 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3585 if (targetm.preferred_reload_class (operand,
3586 this_alternative[i])
3587 == NO_REGS)
3588 reject = 600;
3590 if (operand_type[i] == RELOAD_FOR_OUTPUT
3591 && (targetm.preferred_output_reload_class (operand,
3592 this_alternative[i])
3593 == NO_REGS))
3594 reject = 600;
3597 /* We prefer to reload pseudos over reloading other things,
3598 since such reloads can often be eliminated later.
3599 If we are reloading a SCRATCH, we won't be generating any
3600 insns, just using a register, so it is also preferred.
3601 So bump REJECT in other cases.  Don't do this when we are
3602 forcing a constant into memory and it will then win, since
3603 we don't want a different alternative to match in that
3604 case. */
3605 if (! (REG_P (operand)
3606 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3607 && GET_CODE (operand) != SCRATCH
3608 && ! (const_to_mem && constmemok))
3609 reject += 2;
3611 /* Input reloads can be inherited more often than output
3612 reloads can be removed, so penalize output reloads. */
3613 if (operand_type[i] != RELOAD_FOR_INPUT
3614 && GET_CODE (operand) != SCRATCH)
3615 reject++;
3618 /* If this operand is a pseudo register that didn't get
3619 a hard reg and this alternative accepts some
3620 register, see if the class that we want is a subset
3621 of the preferred class for this register. If not,
3622 but it intersects that class, use the preferred class
3623 instead. If it does not intersect the preferred
3624 class, show that usage of this alternative should be
3625 discouraged; it will be discouraged more still if the
3626 register is `preferred or nothing'. We do this
3627 because it increases the chance of reusing our spill
3628 register in a later insn and avoiding a pair of
3629 memory stores and loads.
3631 Don't bother with this if this alternative will
3632 accept this operand.
3634 Don't do this for a multiword operand, since it is
3635 only a small win and has the risk of requiring more
3636 spill registers, which could cause a large loss.
3638 Don't do this if the preferred class has only one
3639 register because we might otherwise exhaust the
3640 class. */
3642 if (! win && ! did_match
3643 && this_alternative[i] != NO_REGS
3644 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3645 && reg_class_size [(int) preferred_class[i]] > 0
3646 && ! small_register_class_p (preferred_class[i]))
3648 if (! reg_class_subset_p (this_alternative[i],
3649 preferred_class[i]))
3651 /* Since we don't have a way of forming the intersection,
3652 we just do something special if the preferred class
3653 is a subset of the class we have; that's the most
3654 common case anyway. */
3655 if (reg_class_subset_p (preferred_class[i],
3656 this_alternative[i]))
3657 this_alternative[i] = preferred_class[i];
3658 else
3659 reject += (2 + 2 * pref_or_nothing[i]);
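/* Example (class names are target-specific and purely illustrative): if
   THIS_ALTERNATIVE[i] is GENERAL_REGS but the pseudo's preferred class is
   a subclass such as INDEX_REGS, we narrow to INDEX_REGS so a later insn
   is more likely to reuse the spill register; if neither class contains
   the other, we instead add 2 to REJECT, or 4 when the register is
   "preferred or nothing".  */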
3664 /* Now see if any output operands that are marked "earlyclobber"
3665 in this alternative conflict with any input operands
3666 or any memory addresses. */
3668 for (i = 0; i < noperands; i++)
3669 if (this_alternative_earlyclobber[i]
3670 && (this_alternative_win[i] || this_alternative_match_win[i]))
3672 struct decomposition early_data;
3674 early_data = decompose (recog_data.operand[i]);
3676 gcc_assert (modified[i] != RELOAD_READ);
3678 if (this_alternative[i] == NO_REGS)
3680 this_alternative_earlyclobber[i] = 0;
3681 gcc_assert (this_insn_is_asm);
3682 error_for_asm (this_insn,
3683 "%<&%> constraint used with no register class");
3686 for (j = 0; j < noperands; j++)
3687 /* Is this an input operand or a memory ref? */
3688 if ((MEM_P (recog_data.operand[j])
3689 || modified[j] != RELOAD_WRITE)
3690 && j != i
3691 /* Ignore things like match_operator operands. */
3692 && !recog_data.is_operator[j]
3693 /* Don't count an input operand that is constrained to match
3694 the early clobber operand. */
3695 && ! (this_alternative_matches[j] == i
3696 && rtx_equal_p (recog_data.operand[i],
3697 recog_data.operand[j]))
3698 /* Is it altered by storing the earlyclobber operand? */
3699 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3700 early_data))
3702 /* If the output is in a non-empty few-regs class,
3703 it's costly to reload it, so reload the input instead. */
3704 if (small_register_class_p (this_alternative[i])
3705 && (REG_P (recog_data.operand[j])
3706 || GET_CODE (recog_data.operand[j]) == SUBREG))
3708 losers++;
3709 this_alternative_win[j] = 0;
3710 this_alternative_match_win[j] = 0;
3712 else
3713 break;
3715 /* If an earlyclobber operand conflicts with something,
3716 it must be reloaded, so request this and count the cost. */
3717 if (j != noperands)
3719 losers++;
3720 this_alternative_win[i] = 0;
3721 this_alternative_match_win[j] = 0;
3722 for (j = 0; j < noperands; j++)
3723 if (this_alternative_matches[j] == i
3724 && this_alternative_match_win[j])
3726 this_alternative_win[j] = 0;
3727 this_alternative_match_win[j] = 0;
3728 losers++;
3733 /* If one alternative accepts all the operands, no reload required,
3734 choose that alternative; don't consider the remaining ones. */
3735 if (losers == 0)
3737 /* Unswap these so that they are never swapped at `finish'. */
3738 if (swapped)
3740 recog_data.operand[commutative] = substed_operand[commutative];
3741 recog_data.operand[commutative + 1]
3742 = substed_operand[commutative + 1];
3744 for (i = 0; i < noperands; i++)
3746 goal_alternative_win[i] = this_alternative_win[i];
3747 goal_alternative_match_win[i] = this_alternative_match_win[i];
3748 goal_alternative[i] = this_alternative[i];
3749 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3750 goal_alternative_matches[i] = this_alternative_matches[i];
3751 goal_alternative_earlyclobber[i]
3752 = this_alternative_earlyclobber[i];
3754 goal_alternative_number = this_alternative_number;
3755 goal_alternative_swapped = swapped;
3756 goal_earlyclobber = this_earlyclobber;
3757 goto finish;
3760 /* REJECT, set by the ! and ? constraint characters and when a register
3761 would be reloaded into a non-preferred class, discourages the use of
3762 this alternative for a reload goal. REJECT is incremented by six
3763 for each ? and two for each non-preferred class. */
3764 losers = losers * 6 + reject;
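/* To make the scoring concrete: each needed reload counts as six units, so
   a single '?' (which adds six to REJECT) costs exactly as much as one
   extra reload; an alternative needing two reloads plus one '?' scores
   2*6 + 6 == 18, the same as one needing three reloads and no '?'.  A '!'
   sets REJECT to 600, so that alternative can be chosen only when every
   competitor is at least as heavily penalized or cannot be reloaded at
   all.  (Numbers above are a worked example, not additional rules.)  */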
3766 /* If this alternative can be made to work by reloading,
3767 and it needs less reloading than the others checked so far,
3768 record it as the chosen goal for reloading. */
3769 if (! bad)
3771 if (best > losers)
3773 for (i = 0; i < noperands; i++)
3775 goal_alternative[i] = this_alternative[i];
3776 goal_alternative_win[i] = this_alternative_win[i];
3777 goal_alternative_match_win[i]
3778 = this_alternative_match_win[i];
3779 goal_alternative_offmemok[i]
3780 = this_alternative_offmemok[i];
3781 goal_alternative_matches[i] = this_alternative_matches[i];
3782 goal_alternative_earlyclobber[i]
3783 = this_alternative_earlyclobber[i];
3785 goal_alternative_swapped = swapped;
3786 best = losers;
3787 goal_alternative_number = this_alternative_number;
3788 goal_earlyclobber = this_earlyclobber;
3792 if (swapped)
3794 enum reg_class tclass;
3795 int t;
3797 /* If the commutative operands have been swapped, swap
3798 them back in order to check the next alternative. */
3799 recog_data.operand[commutative] = substed_operand[commutative];
3800 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3801 /* Unswap the duplicates too. */
3802 for (i = 0; i < recog_data.n_dups; i++)
3803 if (recog_data.dup_num[i] == commutative
3804 || recog_data.dup_num[i] == commutative + 1)
3805 *recog_data.dup_loc[i]
3806 = recog_data.operand[(int) recog_data.dup_num[i]];
3808 /* Unswap the operand related information as well. */
3809 tclass = preferred_class[commutative];
3810 preferred_class[commutative] = preferred_class[commutative + 1];
3811 preferred_class[commutative + 1] = tclass;
3813 t = pref_or_nothing[commutative];
3814 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3815 pref_or_nothing[commutative + 1] = t;
3817 t = address_reloaded[commutative];
3818 address_reloaded[commutative] = address_reloaded[commutative + 1];
3819 address_reloaded[commutative + 1] = t;
3824 /* The operands don't meet the constraints.
3825 goal_alternative describes the alternative
3826 that we could reach by reloading the fewest operands.
3827 Reload so as to fit it. */
3829 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3831 /* No alternative works with reloads?? */
3832 if (insn_code_number >= 0)
3833 fatal_insn ("unable to generate reloads for:", insn);
3834 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3835 /* Avoid further trouble with this insn. */
3836 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3837 n_reloads = 0;
3838 return 0;
3841 /* Jump to `finish' from above if all operands are valid already.
3842 In that case, goal_alternative_win is all 1. */
3843 finish:
3845 /* Right now, for any pair of operands I and J that are required to match,
3846 with I < J,
3847 goal_alternative_matches[J] is I.
3848 Set up goal_alternative_matched as the inverse function:
3849 goal_alternative_matched[I] = J. */
3851 for (i = 0; i < noperands; i++)
3852 goal_alternative_matched[i] = -1;
3854 for (i = 0; i < noperands; i++)
3855 if (! goal_alternative_win[i]
3856 && goal_alternative_matches[i] >= 0)
3857 goal_alternative_matched[goal_alternative_matches[i]] = i;
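/* For instance, if operand 2 carried the constraint "0" and could not be
   made to win, goal_alternative_matches[2] is 0 and the loop above records
   goal_alternative_matched[0] = 2.  (Operand numbers are illustrative.)  */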
3859 for (i = 0; i < noperands; i++)
3860 goal_alternative_win[i] |= goal_alternative_match_win[i];
3862 /* If the best alternative is with operands 1 and 2 swapped,
3863 consider them swapped before reporting the reloads. Update the
3864 operand numbers of any reloads already pushed. */
3866 if (goal_alternative_swapped)
3868 rtx tem;
3870 tem = substed_operand[commutative];
3871 substed_operand[commutative] = substed_operand[commutative + 1];
3872 substed_operand[commutative + 1] = tem;
3873 tem = recog_data.operand[commutative];
3874 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3875 recog_data.operand[commutative + 1] = tem;
3876 tem = *recog_data.operand_loc[commutative];
3877 *recog_data.operand_loc[commutative]
3878 = *recog_data.operand_loc[commutative + 1];
3879 *recog_data.operand_loc[commutative + 1] = tem;
3881 for (i = 0; i < n_reloads; i++)
3883 if (rld[i].opnum == commutative)
3884 rld[i].opnum = commutative + 1;
3885 else if (rld[i].opnum == commutative + 1)
3886 rld[i].opnum = commutative;
3890 for (i = 0; i < noperands; i++)
3892 operand_reloadnum[i] = -1;
3894 /* If this is an earlyclobber operand, we need to widen the scope.
3895 The reload must remain valid from the start of the insn being
3896 reloaded until after the operand is stored into its destination.
3897 We approximate this with RELOAD_OTHER even though we know that we
3898 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3900 One special case that is worth checking is when we have an
3901 output that is earlyclobber but isn't used past the insn (typically
3902 a SCRATCH).  In this case, we need only keep the reload live
3903 through the insn itself, not for any of our input or output
3904 reloads.
3905 But we must not accidentally narrow the scope of an existing
3906 RELOAD_OTHER reload - leave these alone.
3908 In any case, anything needed to address this operand can remain
3909 however it was previously categorized. */
3911 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3912 operand_type[i]
3913 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3914 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3917 /* Any constants that aren't allowed and can't be reloaded
3918 into registers are here changed into memory references. */
3919 for (i = 0; i < noperands; i++)
3920 if (! goal_alternative_win[i])
3922 rtx op = recog_data.operand[i];
3923 rtx subreg = NULL_RTX;
3924 rtx plus = NULL_RTX;
3925 enum machine_mode mode = operand_mode[i];
3927 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3928 push_reload so we have to let them pass here. */
3929 if (GET_CODE (op) == SUBREG)
3931 subreg = op;
3932 op = SUBREG_REG (op);
3933 mode = GET_MODE (op);
3936 if (GET_CODE (op) == PLUS)
3938 plus = op;
3939 op = XEXP (op, 1);
3942 if (CONST_POOL_OK_P (mode, op)
3943 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3944 == NO_REGS)
3945 || no_input_reloads))
3947 int this_address_reloaded;
3948 rtx tem = force_const_mem (mode, op);
3950 /* If we stripped a SUBREG or a PLUS above, add it back. */
3951 if (plus != NULL_RTX)
3952 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3954 if (subreg != NULL_RTX)
3955 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3957 this_address_reloaded = 0;
3958 substed_operand[i] = recog_data.operand[i]
3959 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3960 0, insn, &this_address_reloaded);
3962 /* If the alternative accepts constant pool refs directly
3963 there will be no reload needed at all. */
3964 if (plus == NULL_RTX
3965 && subreg == NULL_RTX
3966 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3967 ? substed_operand[i]
3968 : NULL,
3969 recog_data.constraints[i],
3970 goal_alternative_number))
3971 goal_alternative_win[i] = 1;
3975 /* Record the values of the earlyclobber operands for the caller. */
3976 if (goal_earlyclobber)
3977 for (i = 0; i < noperands; i++)
3978 if (goal_alternative_earlyclobber[i])
3979 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3981 /* Now record reloads for all the operands that need them. */
3982 for (i = 0; i < noperands; i++)
3983 if (! goal_alternative_win[i])
3985 /* Operands that match previous ones have already been handled. */
3986 if (goal_alternative_matches[i] >= 0)
3988 /* Handle an operand with a nonoffsettable address
3989 appearing where an offsettable address will do
3990 by reloading the address into a base register.
3992 ??? We can also do this when the operand is a register and
3993 reg_equiv_mem is not offsettable, but this is a bit tricky,
3994 so we don't bother with it. It may not be worth doing. */
3995 else if (goal_alternative_matched[i] == -1
3996 && goal_alternative_offmemok[i]
3997 && MEM_P (recog_data.operand[i]))
3999 /* If the address to be reloaded is a VOIDmode constant,
4000 use the default address mode as the mode of the reload register,
4001 as would have been done by find_reloads_address. */
4002 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4003 enum machine_mode address_mode;
4005 address_mode = get_address_mode (recog_data.operand[i]);
4006 operand_reloadnum[i]
4007 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4008 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4009 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4010 address_mode,
4011 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4012 rld[operand_reloadnum[i]].inc
4013 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4015 /* If this operand is an output, we will have made any
4016 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4017 now we are treating part of the operand as an input, so
4018 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4020 if (modified[i] == RELOAD_WRITE)
4022 for (j = 0; j < n_reloads; j++)
4024 if (rld[j].opnum == i)
4026 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4027 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4028 else if (rld[j].when_needed
4029 == RELOAD_FOR_OUTADDR_ADDRESS)
4030 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4035 else if (goal_alternative_matched[i] == -1)
4037 operand_reloadnum[i]
4038 = push_reload ((modified[i] != RELOAD_WRITE
4039 ? recog_data.operand[i] : 0),
4040 (modified[i] != RELOAD_READ
4041 ? recog_data.operand[i] : 0),
4042 (modified[i] != RELOAD_WRITE
4043 ? recog_data.operand_loc[i] : 0),
4044 (modified[i] != RELOAD_READ
4045 ? recog_data.operand_loc[i] : 0),
4046 (enum reg_class) goal_alternative[i],
4047 (modified[i] == RELOAD_WRITE
4048 ? VOIDmode : operand_mode[i]),
4049 (modified[i] == RELOAD_READ
4050 ? VOIDmode : operand_mode[i]),
4051 (insn_code_number < 0 ? 0
4052 : insn_data[insn_code_number].operand[i].strict_low),
4053 0, i, operand_type[i]);
4055 /* In a matching pair of operands, one must be input only
4056 and the other must be output only.
4057 Pass the input operand as IN and the other as OUT. */
4058 else if (modified[i] == RELOAD_READ
4059 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4061 operand_reloadnum[i]
4062 = push_reload (recog_data.operand[i],
4063 recog_data.operand[goal_alternative_matched[i]],
4064 recog_data.operand_loc[i],
4065 recog_data.operand_loc[goal_alternative_matched[i]],
4066 (enum reg_class) goal_alternative[i],
4067 operand_mode[i],
4068 operand_mode[goal_alternative_matched[i]],
4069 0, 0, i, RELOAD_OTHER);
4070 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4072 else if (modified[i] == RELOAD_WRITE
4073 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4075 operand_reloadnum[goal_alternative_matched[i]]
4076 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4077 recog_data.operand[i],
4078 recog_data.operand_loc[goal_alternative_matched[i]],
4079 recog_data.operand_loc[i],
4080 (enum reg_class) goal_alternative[i],
4081 operand_mode[goal_alternative_matched[i]],
4082 operand_mode[i],
4083 0, 0, i, RELOAD_OTHER);
4084 operand_reloadnum[i] = output_reloadnum;
4086 else
4088 gcc_assert (insn_code_number < 0);
4089 error_for_asm (insn, "inconsistent operand constraints "
4090 "in an %<asm%>");
4091 /* Avoid further trouble with this insn. */
4092 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4093 n_reloads = 0;
4094 return 0;
4097 else if (goal_alternative_matched[i] < 0
4098 && goal_alternative_matches[i] < 0
4099 && address_operand_reloaded[i] != 1
4100 && optimize)
4102 /* For each non-matching operand that's a MEM or a pseudo-register
4103 that didn't get a hard register, make an optional reload.
4104 This may get done even if the insn needs no reloads otherwise. */
4106 rtx operand = recog_data.operand[i];
4108 while (GET_CODE (operand) == SUBREG)
4109 operand = SUBREG_REG (operand);
4110 if ((MEM_P (operand)
4111 || (REG_P (operand)
4112 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4113 /* If this is only for an output, the optional reload would not
4114 actually cause us to use a register now, just note that
4115 something is stored here. */
4116 && (goal_alternative[i] != NO_REGS
4117 || modified[i] == RELOAD_WRITE)
4118 && ! no_input_reloads
4119 /* An optional output reload might later allow INSN to be deleted.
4120 We must not make in-out reloads on insns for which output
4121 reloads are not permitted.
4122 If this is an asm, we can't delete it; we must not even call
4123 push_reload for an optional output reload in this case,
4124 because we can't be sure that the constraint allows a register,
4125 and push_reload verifies the constraints for asms. */
4126 && (modified[i] == RELOAD_READ
4127 || (! no_output_reloads && ! this_insn_is_asm)))
4128 operand_reloadnum[i]
4129 = push_reload ((modified[i] != RELOAD_WRITE
4130 ? recog_data.operand[i] : 0),
4131 (modified[i] != RELOAD_READ
4132 ? recog_data.operand[i] : 0),
4133 (modified[i] != RELOAD_WRITE
4134 ? recog_data.operand_loc[i] : 0),
4135 (modified[i] != RELOAD_READ
4136 ? recog_data.operand_loc[i] : 0),
4137 (enum reg_class) goal_alternative[i],
4138 (modified[i] == RELOAD_WRITE
4139 ? VOIDmode : operand_mode[i]),
4140 (modified[i] == RELOAD_READ
4141 ? VOIDmode : operand_mode[i]),
4142 (insn_code_number < 0 ? 0
4143 : insn_data[insn_code_number].operand[i].strict_low),
4144 1, i, operand_type[i]);
4145 /* If a memory reference remains (either as a MEM or a pseudo that
4146 did not get a hard register), yet we can't make an optional
4147 reload, check if this is actually a pseudo register reference;
4148 we then need to emit a USE and/or a CLOBBER so that reload
4149 inheritance will do the right thing. */
4150 else if (replace
4151 && (MEM_P (operand)
4152 || (REG_P (operand)
4153 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4154 && reg_renumber [REGNO (operand)] < 0)))
4156 operand = *recog_data.operand_loc[i];
4158 while (GET_CODE (operand) == SUBREG)
4159 operand = SUBREG_REG (operand);
4160 if (REG_P (operand))
4162 if (modified[i] != RELOAD_WRITE)
4163 /* We mark the USE with QImode so that we recognize
4164 it as one that can be safely deleted at the end
4165 of reload. */
4166 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4167 insn), QImode);
4168 if (modified[i] != RELOAD_READ)
4169 emit_insn_after (gen_clobber (operand), insn);
4173 else if (goal_alternative_matches[i] >= 0
4174 && goal_alternative_win[goal_alternative_matches[i]]
4175 && modified[i] == RELOAD_READ
4176 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4177 && ! no_input_reloads && ! no_output_reloads
4178 && optimize)
4180 /* Similarly, make an optional reload for a pair of matching
4181 objects that are in MEM or a pseudo that didn't get a hard reg. */
4183 rtx operand = recog_data.operand[i];
4185 while (GET_CODE (operand) == SUBREG)
4186 operand = SUBREG_REG (operand);
4187 if ((MEM_P (operand)
4188 || (REG_P (operand)
4189 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4190 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4191 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4192 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4193 recog_data.operand[i],
4194 recog_data.operand_loc[goal_alternative_matches[i]],
4195 recog_data.operand_loc[i],
4196 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4197 operand_mode[goal_alternative_matches[i]],
4198 operand_mode[i],
4199 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4202 /* Perform whatever substitutions on the operands we are supposed
4203 to make due to commutativity or replacement of registers
4204 with equivalent constants or memory slots. */
4206 for (i = 0; i < noperands; i++)
4208 /* We only do this on the last pass through reload, because it is
4209 possible for some data (like reg_equiv_address) to be changed during
4210 later passes. Moreover, we lose the opportunity to get a useful
4211 reload_{in,out}_reg when we do these replacements. */
4213 if (replace)
4215 rtx substitution = substed_operand[i];
4217 *recog_data.operand_loc[i] = substitution;
4219 /* If we're replacing an operand with a LABEL_REF, we need to
4220 make sure that there's a REG_LABEL_OPERAND note attached to
4221 this instruction. */
4222 if (GET_CODE (substitution) == LABEL_REF
4223 && !find_reg_note (insn, REG_LABEL_OPERAND,
4224 LABEL_REF_LABEL (substitution))
4225 /* For a JUMP_P, if it was a branch target it must have
4226 already been recorded as such. */
4227 && (!JUMP_P (insn)
4228 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4229 insn)))
4231 add_reg_note (insn, REG_LABEL_OPERAND,
4232 LABEL_REF_LABEL (substitution));
4233 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4234 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4238 else
4239 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4242 /* If this insn pattern contains any MATCH_DUP's, make sure that
4243 they will be substituted if the operands they match are substituted.
4244 Also do now any substitutions we already did on the operands.
4246 Don't do this if we aren't making replacements because we might be
4247 propagating things allocated by frame pointer elimination into places
4248 it doesn't expect. */
4250 if (insn_code_number >= 0 && replace)
4251 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4253 int opno = recog_data.dup_num[i];
4254 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4255 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4258 #if 0
4259 /* This loses because reloading of prior insns can invalidate the equivalence
4260 (or at least find_equiv_reg isn't smart enough to find it any more),
4261 causing this insn to need more reload regs than it needed before.
4262 It may be too late to make the reload regs available.
4263 Now this optimization is done safely in choose_reload_regs. */
4265 /* For each reload of a reg into some other class of reg,
4266 search for an existing equivalent reg (same value now) in the right class.
4267 We can use it as long as we don't need to change its contents. */
4268 for (i = 0; i < n_reloads; i++)
4269 if (rld[i].reg_rtx == 0
4270 && rld[i].in != 0
4271 && REG_P (rld[i].in)
4272 && rld[i].out == 0)
4274 rld[i].reg_rtx
4275 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4276 static_reload_reg_p, 0, rld[i].inmode);
4277 /* Prevent generation of insn to load the value
4278 because the one we found already has the value. */
4279 if (rld[i].reg_rtx)
4280 rld[i].in = rld[i].reg_rtx;
4282 #endif
4284 /* If we detected an error and replaced the asm instruction with a USE,
4285 forget about the reloads. */
4286 if (GET_CODE (PATTERN (insn)) == USE
4287 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4288 n_reloads = 0;
4290 /* Perhaps an output reload can be combined with another
4291 to reduce needs by one. */
4292 if (!goal_earlyclobber)
4293 combine_reloads ();
4295 /* If we have a pair of reloads for parts of an address, they are reloading
4296 the same object, the operands themselves were not reloaded, and they
4297 are for two operands that are supposed to match, merge the reloads and
4298 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4300 for (i = 0; i < n_reloads; i++)
4302 int k;
4304 for (j = i + 1; j < n_reloads; j++)
4305 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4306 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4307 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4308 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4309 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4310 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4311 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4312 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4313 && rtx_equal_p (rld[i].in, rld[j].in)
4314 && (operand_reloadnum[rld[i].opnum] < 0
4315 || rld[operand_reloadnum[rld[i].opnum]].optional)
4316 && (operand_reloadnum[rld[j].opnum] < 0
4317 || rld[operand_reloadnum[rld[j].opnum]].optional)
4318 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4319 || (goal_alternative_matches[rld[j].opnum]
4320 == rld[i].opnum)))
4322 for (k = 0; k < n_replacements; k++)
4323 if (replacements[k].what == j)
4324 replacements[k].what = i;
4326 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4327 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4328 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4329 else
4330 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4331 rld[j].in = 0;
4335 /* Scan all the reloads and update their type.
4336 If a reload is for the address of an operand and we didn't reload
4337 that operand, change the type. Similarly, change the operand number
4338 of a reload when two operands match. If a reload is optional, treat it
4339 as though the operand isn't reloaded.
4341 ??? This latter case is somewhat odd because if we do the optional
4342 reload, it means the object is hanging around. Thus we need only
4343 do the address reload if the optional reload was NOT done.
4345 Change secondary reloads to be the address type of their operand, not
4346 the normal type.
4348 If an operand's reload is now RELOAD_OTHER, change any
4349 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4350 RELOAD_FOR_OTHER_ADDRESS. */
4352 for (i = 0; i < n_reloads; i++)
4354 if (rld[i].secondary_p
4355 && rld[i].when_needed == operand_type[rld[i].opnum])
4356 rld[i].when_needed = address_type[rld[i].opnum];
4358 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4359 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4360 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4361 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4362 && (operand_reloadnum[rld[i].opnum] < 0
4363 || rld[operand_reloadnum[rld[i].opnum]].optional))
4365 /* If we have a secondary reload to go along with this reload,
4366 change its type to RELOAD_FOR_OPADDR_ADDR. */
4368 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4369 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4370 && rld[i].secondary_in_reload != -1)
4372 int secondary_in_reload = rld[i].secondary_in_reload;
4374 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4376 /* If there's a tertiary reload we have to change it also. */
4377 if (secondary_in_reload > 0
4378 && rld[secondary_in_reload].secondary_in_reload != -1)
4379 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4380 = RELOAD_FOR_OPADDR_ADDR;
4383 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4384 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4385 && rld[i].secondary_out_reload != -1)
4387 int secondary_out_reload = rld[i].secondary_out_reload;
4389 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4391 /* If there's a tertiary reload we have to change it also. */
4392 if (secondary_out_reload
4393 && rld[secondary_out_reload].secondary_out_reload != -1)
4394 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4395 = RELOAD_FOR_OPADDR_ADDR;
4398 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4399 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4400 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4401 else
4402 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4405 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4406 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4407 && operand_reloadnum[rld[i].opnum] >= 0
4408 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4409 == RELOAD_OTHER))
4410 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4412 if (goal_alternative_matches[rld[i].opnum] >= 0)
4413 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4416 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4417 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4418 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4420 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4421 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4422 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4423 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4424 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4425 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4426 This is complicated by the fact that a single operand can have more
4427 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4428 choose_reload_regs without affecting code quality, and cases that
4429 actually fail are extremely rare, so it turns out to be better to fix
4430 the problem here by not generating cases that choose_reload_regs will
4431 fail for. */
4432 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4433 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4434 a single operand.
4435 We can reduce the register pressure by exploiting the fact that a
4436 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4437 does not conflict with any of them, if it is only used for the first of
4438 the RELOAD_FOR_X_ADDRESS reloads. */
4440 int first_op_addr_num = -2;
4441 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4442 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4443 int need_change = 0;
4444 /* We use first_op_addr_num and the contents of the above arrays
4445 initially as flags - -2 means no instance encountered, -1 means exactly
4446 one instance encountered.
4447 If more than one instance has been encountered, we store the reload
4448 number of the first reload of the kind in question; reload numbers
4449 are known to be non-negative. */
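/* To see the flag trick at work: a counter starts at -2; the first reload
   of the given kind bumps it to -1 (the test below fails, nothing is
   recorded); the second bumps it to 0, so we record the reload number and
   set NEED_CHANGE.  Because the loop walks reload numbers downward, the
   value finally stored is the lowest-numbered, i.e. first, reload of that
   kind.  */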
4450 for (i = 0; i < noperands; i++)
4451 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4452 for (i = n_reloads - 1; i >= 0; i--)
4454 switch (rld[i].when_needed)
4456 case RELOAD_FOR_OPERAND_ADDRESS:
4457 if (++first_op_addr_num >= 0)
4459 first_op_addr_num = i;
4460 need_change = 1;
4462 break;
4463 case RELOAD_FOR_INPUT_ADDRESS:
4464 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4466 first_inpaddr_num[rld[i].opnum] = i;
4467 need_change = 1;
4469 break;
4470 case RELOAD_FOR_OUTPUT_ADDRESS:
4471 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4473 first_outpaddr_num[rld[i].opnum] = i;
4474 need_change = 1;
4476 break;
4477 default:
4478 break;
4482 if (need_change)
4484 for (i = 0; i < n_reloads; i++)
4486 int first_num;
4487 enum reload_type type;
4489 switch (rld[i].when_needed)
4491 case RELOAD_FOR_OPADDR_ADDR:
4492 first_num = first_op_addr_num;
4493 type = RELOAD_FOR_OPERAND_ADDRESS;
4494 break;
4495 case RELOAD_FOR_INPADDR_ADDRESS:
4496 first_num = first_inpaddr_num[rld[i].opnum];
4497 type = RELOAD_FOR_INPUT_ADDRESS;
4498 break;
4499 case RELOAD_FOR_OUTADDR_ADDRESS:
4500 first_num = first_outpaddr_num[rld[i].opnum];
4501 type = RELOAD_FOR_OUTPUT_ADDRESS;
4502 break;
4503 default:
4504 continue;
4506 if (first_num < 0)
4507 continue;
4508 else if (i > first_num)
4509 rld[i].when_needed = type;
4510 else
4512 /* Check if the only TYPE reload that uses reload I is
4513 reload FIRST_NUM. */
4514 for (j = n_reloads - 1; j > first_num; j--)
4516 if (rld[j].when_needed == type
4517 && (rld[i].secondary_p
4518 ? rld[j].secondary_in_reload == i
4519 : reg_mentioned_p (rld[i].in, rld[j].in)))
4521 rld[i].when_needed = type;
4522 break;
4530 /* See if we have any reloads that are now allowed to be merged
4531 because we've changed when the reload is needed to
4532 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4533 check for the most common cases. */
4535 for (i = 0; i < n_reloads; i++)
4536 if (rld[i].in != 0 && rld[i].out == 0
4537 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4538 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4539 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4540 for (j = 0; j < n_reloads; j++)
4541 if (i != j && rld[j].in != 0 && rld[j].out == 0
4542 && rld[j].when_needed == rld[i].when_needed
4543 && MATCHES (rld[i].in, rld[j].in)
4544 && rld[i].rclass == rld[j].rclass
4545 && !rld[i].nocombine && !rld[j].nocombine
4546 && rld[i].reg_rtx == rld[j].reg_rtx)
4548 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4549 transfer_replacements (i, j);
4550 rld[j].in = 0;
4553 #ifdef HAVE_cc0
4554 /* If we made any reloads for addresses, see if they violate a
4555 "no input reloads" requirement for this insn. But loads that we
4556 do after the insn (such as for output addresses) are fine. */
4557 if (no_input_reloads)
4558 for (i = 0; i < n_reloads; i++)
4559 gcc_assert (rld[i].in == 0
4560 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4561 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4562 #endif
4564 /* Compute reload_mode and reload_nregs. */
4565 for (i = 0; i < n_reloads; i++)
4567 rld[i].mode
4568 = (rld[i].inmode == VOIDmode
4569 || (GET_MODE_SIZE (rld[i].outmode)
4570 > GET_MODE_SIZE (rld[i].inmode)))
4571 ? rld[i].outmode : rld[i].inmode;
4573 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
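/* For example, an in-out reload with inmode SImode and outmode DImode is
   given mode DImode, and on a typical 32-bit target its nregs will be 2,
   the number of hard registers a DImode value occupies in its class.  */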
4576 /* Special-case a simple move with an input reload and a
4577 destination that is a hard reg: if the hard reg is OK, use it. */
4578 for (i = 0; i < n_reloads; i++)
4579 if (rld[i].when_needed == RELOAD_FOR_INPUT
4580 && GET_CODE (PATTERN (insn)) == SET
4581 && REG_P (SET_DEST (PATTERN (insn)))
4582 && (SET_SRC (PATTERN (insn)) == rld[i].in
4583 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4584 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4586 rtx dest = SET_DEST (PATTERN (insn));
4587 unsigned int regno = REGNO (dest);
4589 if (regno < FIRST_PSEUDO_REGISTER
4590 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4591 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4593 int nr = hard_regno_nregs[regno][rld[i].mode];
4594 int ok = 1, nri;
4596 for (nri = 1; nri < nr; nri ++)
4597 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4599 ok = 0;
4600 break;
4603 if (ok)
4604 rld[i].reg_rtx = dest;
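/* Illustration (register numbers hypothetical): for
   (set (reg:SI 1) (reg:SI 123)) with an input reload of pseudo 123 into a
   class that contains hard reg 1, and with SImode allowed in that register,
   the destination itself becomes the reload register, so no separate spill
   register is needed.  */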
4608 return retval;
4611 /* Return true if alternative number ALTNUM in constraint-string
4612 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4613 MEM gives the reference if it didn't need any reloads, otherwise it
4614 is null. */
4616 static bool
4617 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4618 const char *constraint, int altnum)
4620 int c;
4622 /* Skip alternatives before the one requested. */
4623 while (altnum > 0)
4625 while (*constraint++ != ',')
4627 altnum--;
4629 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4630 If one of them is present, this alternative accepts the result of
4631 passing a constant-pool reference through find_reloads_toplev.
4633 The same is true of extra memory constraints if the address
4634 was reloaded into a register. However, the target may elect
4635 to disallow the original constant address, forcing it to be
4636 reloaded into a register instead. */
4637 for (; (c = *constraint) && c != ',' && c != '#';
4638 constraint += CONSTRAINT_LEN (c, constraint))
4640 enum constraint_num cn = lookup_constraint (constraint);
4641 if (insn_extra_memory_constraint (cn)
4642 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4643 return true;
4645 return false;
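/* For example, with CONSTRAINT == "r,m" and ALTNUM == 1, the skipping loop
   first consumes "r," and the scan then sees "m"; since that is a memory
   constraint, the function returns true (provided MEM, when supplied,
   satisfies it).  */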
4648 /* Scan X for memory references and scan the addresses for reloading.
4649 Also checks for references to "constant" regs that we want to eliminate
4650 and replaces them with the values they stand for.
4651 We may alter X destructively if it contains a reference to such.
4652 If X is just a constant reg, we return the equivalent value
4653 instead of X.
4655 IND_LEVELS says how many levels of indirect addressing this machine
4656 supports.
4658 OPNUM and TYPE identify the purpose of the reload.
4660 IS_SET_DEST is true if X is the destination of a SET, which is not
4661 appropriate to be replaced by a constant.
4663 INSN, if nonzero, is the insn in which we do the reload. It is used
4664 to determine if we may generate output reloads, and where to put USEs
4665 for pseudos that we have to replace with stack slots.
4667 ADDRESS_RELOADED, if nonzero, points to where we store the
4668 result of find_reloads_address. */
4670 static rtx
4671 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4672 int ind_levels, int is_set_dest, rtx_insn *insn,
4673 int *address_reloaded)
4675 RTX_CODE code = GET_CODE (x);
4677 const char *fmt = GET_RTX_FORMAT (code);
4678 int i;
4679 int copied;
4681 if (code == REG)
4683 /* This code is duplicated for speed in find_reloads. */
4684 int regno = REGNO (x);
4685 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4686 x = reg_equiv_constant (regno);
4687 #if 0
4688 /* This creates (subreg (mem...)) which would cause an unnecessary
4689 reload of the mem. */
4690 else if (reg_equiv_mem (regno) != 0)
4691 x = reg_equiv_mem (regno);
4692 #endif
4693 else if (reg_equiv_memory_loc (regno)
4694 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4696 rtx mem = make_memloc (x, regno);
4697 if (reg_equiv_address (regno)
4698 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4700 /* If this is not a toplevel operand, find_reloads doesn't see
4701 this substitution. We have to emit a USE of the pseudo so
4702 that delete_output_reload can see it. */
4703 if (replace_reloads && recog_data.operand[opnum] != x)
4704 /* We mark the USE with QImode so that we recognize it
4705 as one that can be safely deleted at the end of
4706 reload. */
4707 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4708 QImode);
4709 x = mem;
4710 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4711 opnum, type, ind_levels, insn);
4712 if (!rtx_equal_p (x, mem))
4713 push_reg_equiv_alt_mem (regno, x);
4714 if (address_reloaded)
4715 *address_reloaded = i;
4718 return x;
4720 if (code == MEM)
4722 rtx tem = x;
4724 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4725 opnum, type, ind_levels, insn);
4726 if (address_reloaded)
4727 *address_reloaded = i;
4729 return tem;
4732 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4734 /* Check for SUBREG containing a REG that's equivalent to a
4735 constant. If the constant has a known value, truncate it
4736 right now. Similarly if we are extracting a single-word of a
4737 multi-word constant. If the constant is symbolic, allow it
4738 to be substituted normally. push_reload will strip the
4739 subreg later. The constant must not be VOIDmode, because we
4740 will lose the mode of the register (this should never happen
4741 because one of the cases above should handle it). */
4743 int regno = REGNO (SUBREG_REG (x));
4744 rtx tem;
4746 if (regno >= FIRST_PSEUDO_REGISTER
4747 && reg_renumber[regno] < 0
4748 && reg_equiv_constant (regno) != 0)
4750 tem =
4751 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4752 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4753 gcc_assert (tem);
4754 if (CONSTANT_P (tem)
4755 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4757 tem = force_const_mem (GET_MODE (x), tem);
4758 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4759 &XEXP (tem, 0), opnum, type,
4760 ind_levels, insn);
4761 if (address_reloaded)
4762 *address_reloaded = i;
4764 return tem;
4767 /* If the subreg contains a reg that will be converted to a mem,
4768 attempt to convert the whole subreg to a (narrower or wider)
4769 memory reference instead. If this succeeds, we're done --
4770 otherwise fall through to check whether the inner reg still
4771 needs address reloads anyway. */
4773 if (regno >= FIRST_PSEUDO_REGISTER
4774 && reg_equiv_memory_loc (regno) != 0)
4776 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4777 insn, address_reloaded);
4778 if (tem)
4779 return tem;
4783 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4785 if (fmt[i] == 'e')
4787 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4788 ind_levels, is_set_dest, insn,
4789 address_reloaded);
4790 /* If we have replaced a reg with its equivalent memory loc -
4791 that can still be handled here e.g. if it's in a paradoxical
4792 subreg - we must make the change in a copy, rather than using
4793 a destructive change. This way, find_reloads can still elect
4794 not to do the change. */
4795 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4797 x = shallow_copy_rtx (x);
4798 copied = 1;
4800 XEXP (x, i) = new_part;
4803 return x;
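/* As a concrete illustration of the copy-on-write discipline above: if X is
   (plus (reg 70) (reg 71)) and only reg 71 is replaced by its memory
   equivalent, the PLUS is shallow-copied once and the copy receives the new
   operand; the insn itself still refers to the unmodified PLUS until
   find_reloads decides to commit the substitution.  (Register numbers are
   purely illustrative.)  */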
4806 /* Return a mem ref for the memory equivalent of reg REGNO.
4807 This mem ref is not shared with anything. */
4809 static rtx
4810 make_memloc (rtx ad, int regno)
4812 /* We must rerun eliminate_regs, in case the elimination
4813 offsets have changed. */
4814 rtx tem
4815 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4818 /* If TEM might contain a pseudo, we must copy it to avoid
4819 modifying it when we do the substitution for the reload. */
4820 if (rtx_varies_p (tem, 0))
4821 tem = copy_rtx (tem);
4823 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4824 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4826 /* Copy the result if it's still the same as the equivalence, to avoid
4827 modifying it when we do the substitution for the reload. */
4828 if (tem == reg_equiv_memory_loc (regno))
4829 tem = copy_rtx (tem);
4830 return tem;
4833 /* Returns true if AD could be turned into a valid memory reference
4834 to mode MODE in address space AS by reloading the part pointed to
4835 by PART into a register. */
4837 static int
4838 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4839 addr_space_t as, rtx *part)
4841 int retv;
4842 rtx tem = *part;
4843 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4845 *part = reg;
4846 retv = memory_address_addr_space_p (mode, ad, as);
4847 *part = tem;
4849 return retv;
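/* For instance, with AD == (plus:SI (reg:SI 117) (const_int 12)) and PART
   pointing at the register, we temporarily substitute a brand-new REG of
   the same mode, ask memory_address_addr_space_p whether the resulting form
   could be a valid address, and restore the original contents of *PART
   before returning the verdict.  (Register number and mode are
   illustrative.)  */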
4852 /* Record all reloads needed for handling memory address AD
4853 which appears in *LOC in a memory reference to mode MODE
4854 which itself is found in location *MEMREFLOC.
4855 Note that we take shortcuts assuming that no multi-reg machine mode
4856 occurs as part of an address.
4858 OPNUM and TYPE specify the purpose of this reload.
4860 IND_LEVELS says how many levels of indirect addressing this machine
4861 supports.
4863 INSN, if nonzero, is the insn in which we do the reload. It is used
4864 to determine if we may generate output reloads, and where to put USEs
4865 for pseudos that we have to replace with stack slots.
4867 Value is one if this address is reloaded or replaced as a whole; it is
4868 zero if the top level of this address was not reloaded or replaced, and
4869 it is -1 if it may or may not have been reloaded or replaced.
4871 Note that there is no verification that the address will be valid after
4872 this routine does its work. Instead, we rely on the fact that the address
4873 was valid when reload started. So we need only undo things that reload
4874 could have broken. These are wrong register types, pseudos not allocated
4875 to a hard register, and frame pointer elimination. */
4877 static int
4878 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4879 rtx *loc, int opnum, enum reload_type type,
4880 int ind_levels, rtx_insn *insn)
4882 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4883 : ADDR_SPACE_GENERIC;
4884 int regno;
4885 int removed_and = 0;
4886 int op_index;
4887 rtx tem;
4889 /* If the address is a register, see if it is a legitimate address and
4890 reload if not. We first handle the cases where we need not reload
4891 or where we must reload in a non-standard way. */
4893 if (REG_P (ad))
4895 regno = REGNO (ad);
4897 if (reg_equiv_constant (regno) != 0)
4899 find_reloads_address_part (reg_equiv_constant (regno), loc,
4900 base_reg_class (mode, as, MEM, SCRATCH),
4901 GET_MODE (ad), opnum, type, ind_levels);
4902 return 1;
4905 tem = reg_equiv_memory_loc (regno);
4906 if (tem != 0)
4908 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4910 tem = make_memloc (ad, regno);
4911 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4912 XEXP (tem, 0),
4913 MEM_ADDR_SPACE (tem)))
4915 rtx orig = tem;
4917 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4918 &XEXP (tem, 0), opnum,
4919 ADDR_TYPE (type), ind_levels, insn);
4920 if (!rtx_equal_p (tem, orig))
4921 push_reg_equiv_alt_mem (regno, tem);
4923 /* We can avoid a reload if the register's equivalent memory
4924 expression is valid as an indirect memory address.
4925 But not all addresses are valid in a mem used as an indirect
4926 address: only reg or reg+constant. */
4928 if (ind_levels > 0
4929 && strict_memory_address_addr_space_p (mode, tem, as)
4930 && (REG_P (XEXP (tem, 0))
4931 || (GET_CODE (XEXP (tem, 0)) == PLUS
4932 && REG_P (XEXP (XEXP (tem, 0), 0))
4933 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4935 /* TEM is not the same as what we'll be replacing the
4936 pseudo with after reload, so put a USE in front of INSN
4937 in the final reload pass. */
4938 if (replace_reloads
4939 && num_not_at_initial_offset
4940 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4942 *loc = tem;
4943 /* We mark the USE with QImode so that we
4944 recognize it as one that can be safely
4945 deleted at the end of reload. */
4946 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4947 insn), QImode);
4949 /* This doesn't really count as replacing the address
4950 as a whole, since it is still a memory access. */
4952 return 0;
4954 ad = tem;
4958 /* The only remaining case where we can avoid a reload is if this is a
4959 hard register that is valid as a base register and which is not the
4960 subject of a CLOBBER in this insn. */
4962 else if (regno < FIRST_PSEUDO_REGISTER
4963 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4964 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4965 return 0;
4967 /* If we do not have one of the cases above, we must do the reload. */
4968 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4969 base_reg_class (mode, as, MEM, SCRATCH),
4970 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4971 return 1;
4974 if (strict_memory_address_addr_space_p (mode, ad, as))
4976 /* The address appears valid, so reloads are not needed.
4977 But the address may contain an eliminable register.
4978 This can happen because a machine with indirect addressing
4979 may consider a pseudo register by itself a valid address even when
4980 it has failed to get a hard reg.
4981 So do a tree-walk to find and eliminate all such regs. */
4983 /* But first quickly dispose of a common case. */
4984 if (GET_CODE (ad) == PLUS
4985 && CONST_INT_P (XEXP (ad, 1))
4986 && REG_P (XEXP (ad, 0))
4987 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4988 return 0;
4990 subst_reg_equivs_changed = 0;
4991 *loc = subst_reg_equivs (ad, insn);
4993 if (! subst_reg_equivs_changed)
4994 return 0;
4996 /* Check result for validity after substitution. */
4997 if (strict_memory_address_addr_space_p (mode, ad, as))
4998 return 0;
5001 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5004 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5006 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5007 ind_levels, win);
5009 break;
5010 win:
5011 *memrefloc = copy_rtx (*memrefloc);
5012 XEXP (*memrefloc, 0) = ad;
5013 move_replacements (&ad, &XEXP (*memrefloc, 0));
5014 return -1;
5016 while (0);
5017 #endif
5019 /* The address is not valid. We have to figure out why. First see if
5020 we have an outer AND and remove it if so. Then analyze what's inside. */
5022 if (GET_CODE (ad) == AND)
5024 removed_and = 1;
5025 loc = &XEXP (ad, 0);
5026 ad = *loc;
5029 /* One possibility for why the address is invalid is that it is itself
5030 a MEM. This can happen when the frame pointer is being eliminated, a
5031 pseudo is not allocated to a hard register, and the offset between the
5032 frame and stack pointers is not its initial value. In that case the
5033 pseudo will have been replaced by a MEM referring to the
5034 stack pointer. */
5035 if (MEM_P (ad))
5037 /* First ensure that the address in this MEM is valid. Then, unless
5038 indirect addresses are valid, reload the MEM into a register. */
5039 tem = ad;
5040 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5041 opnum, ADDR_TYPE (type),
5042 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5044 /* If tem was changed, then we must create a new memory reference to
5045 hold it and store it back into memrefloc. */
5046 if (tem != ad && memrefloc)
5048 *memrefloc = copy_rtx (*memrefloc);
5049 copy_replacements (tem, XEXP (*memrefloc, 0));
5050 loc = &XEXP (*memrefloc, 0);
5051 if (removed_and)
5052 loc = &XEXP (*loc, 0);
5055 /* Check cases similar to those for indirect addresses above, except
5056 that we can allow pseudos and a MEM since they should have been
5057 taken care of above. */
5059 if (ind_levels == 0
5060 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5061 || MEM_P (XEXP (tem, 0))
5062 || ! (REG_P (XEXP (tem, 0))
5063 || (GET_CODE (XEXP (tem, 0)) == PLUS
5064 && REG_P (XEXP (XEXP (tem, 0), 0))
5065 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5067 /* Must use TEM here, not AD, since it is the one that will
5068 have any subexpressions reloaded, if needed. */
5069 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5070 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5071 VOIDmode, 0,
5072 0, opnum, type);
5073 return ! removed_and;
5075 else
5076 return 0;
5079 /* If we have address of a stack slot but it's not valid because the
5080 displacement is too large, compute the sum in a register.
5081 Handle all base registers here, not just fp/ap/sp, because on some
5082 targets (namely SH) we can also get too large displacements from
5083 big-endian corrections. */
5084 else if (GET_CODE (ad) == PLUS
5085 && REG_P (XEXP (ad, 0))
5086 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5087 && CONST_INT_P (XEXP (ad, 1))
5088 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5089 CONST_INT)
5090 /* Similarly, if we were to reload the base register and the
5091 mem+offset address is still invalid, then we want to reload
5092 the whole address, not just the base register. */
5093 || ! maybe_memory_address_addr_space_p
5094 (mode, ad, as, &(XEXP (ad, 0)))))
5097 /* Unshare the MEM rtx so we can safely alter it. */
5098 if (memrefloc)
5100 *memrefloc = copy_rtx (*memrefloc);
5101 loc = &XEXP (*memrefloc, 0);
5102 if (removed_and)
5103 loc = &XEXP (*loc, 0);
5106 if (double_reg_address_ok
5107 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5108 PLUS, CONST_INT))
5110 /* Unshare the sum as well. */
5111 *loc = ad = copy_rtx (ad);
5113 /* Reload the displacement into an index reg.
5114 We assume the frame pointer or arg pointer is a base reg. */
5115 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5116 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5117 type, ind_levels);
5118 return 0;
5120 else
5122 /* If the sum of two regs is not necessarily valid,
5123 reload the sum into a base reg.
5124 That will at least work. */
5125 find_reloads_address_part (ad, loc,
5126 base_reg_class (mode, as, MEM, SCRATCH),
5127 GET_MODE (ad), opnum, type, ind_levels);
5129 return ! removed_and;
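/* Example for the case handled just above (the displacement is
   hypothetical): given (plus (reg sp) (const_int 100000)) with an
   out-of-range offset, either the constant alone is reloaded into an
   index register when double_reg_address_ok allows a reg+reg address,
   leaving (plus (reg sp) (reg index)) after substitution, or the whole
   sum is reloaded into a single base register.  */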
5132 /* If we have an indexed stack slot, there are three possible reasons why
5133 it might be invalid: The index might need to be reloaded, the address
5134 might have been made by frame pointer elimination and hence have a
5135 constant out of range, or both reasons might apply.
5137 We can easily check for an index needing reload, but even if that is the
5138 case, we might also have an invalid constant. To avoid making the
5139 conservative assumption and requiring two reloads, we see if this address
5140 is valid when not interpreted strictly. If it is, the only problem is
5141 that the index needs a reload and find_reloads_address_1 will take care
5142 of it.
5144 Handle all base registers here, not just fp/ap/sp, because on some
5145 targets (namely SPARC) we can also get invalid addresses from preventive
5146 subreg big-endian corrections made by find_reloads_toplev. We
5147 can also get expressions involving LO_SUM (rather than PLUS) from
5148 find_reloads_subreg_address.
5150 If we decide to do something, it must be that `double_reg_address_ok'
5151 is true. We generate a reload of the base register + constant and
5152 rework the sum so that the reload register will be added to the index.
5153 This is safe because we know the address isn't shared.
5155 We check for the base register as both the first and second operand of
5156 the innermost PLUS and/or LO_SUM. */
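/* Illustration of the rework described above (offsets are hypothetical):
   (plus (plus (reg fp) (reg index)) (const_int 400)) is rebuilt below as
   (plus (plus (reg fp) (const_int 400)) (reg index)), and the inner
   base-plus-constant sum is then reloaded into a base register, so the
   reload register is what ends up being added to the index.  */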
5158 for (op_index = 0; op_index < 2; ++op_index)
5160 rtx operand, addend;
5161 enum rtx_code inner_code;
5163 if (GET_CODE (ad) != PLUS)
5164 continue;
5166 inner_code = GET_CODE (XEXP (ad, 0));
5167 if (!(GET_CODE (ad) == PLUS
5168 && CONST_INT_P (XEXP (ad, 1))
5169 && (inner_code == PLUS || inner_code == LO_SUM)))
5170 continue;
5172 operand = XEXP (XEXP (ad, 0), op_index);
5173 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5174 continue;
5176 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5178 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5179 GET_CODE (addend))
5180 || operand == frame_pointer_rtx
5181 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5182 || operand == hard_frame_pointer_rtx
5183 #endif
5184 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5185 || operand == arg_pointer_rtx
5186 #endif
5187 || operand == stack_pointer_rtx)
5188 && ! maybe_memory_address_addr_space_p
5189 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5191 rtx offset_reg;
5192 enum reg_class cls;
5194 offset_reg = plus_constant (GET_MODE (ad), operand,
5195 INTVAL (XEXP (ad, 1)));
5197 /* Form the adjusted address. */
5198 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5199 ad = gen_rtx_PLUS (GET_MODE (ad),
5200 op_index == 0 ? offset_reg : addend,
5201 op_index == 0 ? addend : offset_reg);
5202 else
5203 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5204 op_index == 0 ? offset_reg : addend,
5205 op_index == 0 ? addend : offset_reg);
5206 *loc = ad;
5208 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5209 find_reloads_address_part (XEXP (ad, op_index),
5210 &XEXP (ad, op_index), cls,
5211 GET_MODE (ad), opnum, type, ind_levels);
5212 find_reloads_address_1 (mode, as,
5213 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5214 GET_CODE (XEXP (ad, op_index)),
5215 &XEXP (ad, 1 - op_index), opnum,
5216 type, 0, insn);
5218 return 0;
5222 /* See if address becomes valid when an eliminable register
5223 in a sum is replaced. */
5225 tem = ad;
5226 if (GET_CODE (ad) == PLUS)
5227 tem = subst_indexed_address (ad);
5228 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5230 /* Ok, we win that way. Replace any additional eliminable
5231 registers. */
5233 subst_reg_equivs_changed = 0;
5234 tem = subst_reg_equivs (tem, insn);
5236 /* Make sure that didn't make the address invalid again. */
5238 if (! subst_reg_equivs_changed
5239 || strict_memory_address_addr_space_p (mode, tem, as))
5241 *loc = tem;
5242 return 0;
5246 /* If constants aren't valid addresses, reload the constant address
5247 into a register. */
5248 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5250 enum machine_mode address_mode = GET_MODE (ad);
5251 if (address_mode == VOIDmode)
5252 address_mode = targetm.addr_space.address_mode (as);
5254 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5255 Unshare it so we can safely alter it. */
5256 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5257 && CONSTANT_POOL_ADDRESS_P (ad))
5259 *memrefloc = copy_rtx (*memrefloc);
5260 loc = &XEXP (*memrefloc, 0);
5261 if (removed_and)
5262 loc = &XEXP (*loc, 0);
5265 find_reloads_address_part (ad, loc,
5266 base_reg_class (mode, as, MEM, SCRATCH),
5267 address_mode, opnum, type, ind_levels);
5268 return ! removed_and;
5271 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5272 opnum, type, ind_levels, insn);
5275 /* Find all pseudo regs appearing in AD
5276 that are eliminable in favor of equivalent values
5277 and do not have hard regs; replace them by their equivalents.
5278 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5279 front of it for pseudos that we have to replace with stack slots. */
5281 static rtx
5282 subst_reg_equivs (rtx ad, rtx_insn *insn)
5284 RTX_CODE code = GET_CODE (ad);
5285 int i;
5286 const char *fmt;
5288 switch (code)
5290 case HIGH:
5291 case CONST:
5292 CASE_CONST_ANY:
5293 case SYMBOL_REF:
5294 case LABEL_REF:
5295 case PC:
5296 case CC0:
5297 return ad;
5299 case REG:
5301 int regno = REGNO (ad);
5303 if (reg_equiv_constant (regno) != 0)
5305 subst_reg_equivs_changed = 1;
5306 return reg_equiv_constant (regno);
5308 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5310 rtx mem = make_memloc (ad, regno);
5311 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5313 subst_reg_equivs_changed = 1;
5314 /* We mark the USE with QImode so that we recognize it
5315 as one that can be safely deleted at the end of
5316 reload. */
5317 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5318 QImode);
5319 return mem;
5323 return ad;
5325 case PLUS:
5326 /* Quickly dispose of a common case. */
5327 if (XEXP (ad, 0) == frame_pointer_rtx
5328 && CONST_INT_P (XEXP (ad, 1)))
5329 return ad;
5330 break;
5332 default:
5333 break;
5336 fmt = GET_RTX_FORMAT (code);
5337 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5338 if (fmt[i] == 'e')
5339 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5340 return ad;
5343 /* Compute the sum of X and Y, making canonicalizations assumed in an
5344 address, namely: sum constant integers, surround the sum of two
5345 constants with a CONST, put the constant as the second operand, and
5346 group the constant on the outermost sum.
5348 This routine assumes both inputs are already in canonical form. */
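/* Two illustrative results (not taken from the original source):
     form_sum (Pmode, (plus (reg R) (const_int 4)), (const_int 6))
       yields (plus (reg R) (const_int 10)), and
     form_sum (Pmode, (symbol_ref "x"), (const_int 8))
       yields (const (plus (symbol_ref "x") (const_int 8))).  */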
5350 static rtx
5351 form_sum (enum machine_mode mode, rtx x, rtx y)
5353 rtx tem;
5355 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5356 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5358 if (CONST_INT_P (x))
5359 return plus_constant (mode, y, INTVAL (x));
5360 else if (CONST_INT_P (y))
5361 return plus_constant (mode, x, INTVAL (y));
5362 else if (CONSTANT_P (x))
5363 tem = x, x = y, y = tem;
5365 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5366 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5368 /* Note that if the operands of Y are specified in the opposite
5369 order in the recursive calls below, infinite recursion will occur. */
5370 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5371 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5373 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5374 constant will have been placed second. */
5375 if (CONSTANT_P (x) && CONSTANT_P (y))
5377 if (GET_CODE (x) == CONST)
5378 x = XEXP (x, 0);
5379 if (GET_CODE (y) == CONST)
5380 y = XEXP (y, 0);
5382 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5385 return gen_rtx_PLUS (mode, x, y);
5388 /* If ADDR is a sum containing a pseudo register that should be
5389 replaced with a constant (from reg_equiv_constant),
5390 return the result of doing so, and also apply the associative
5391 law so that the result is more likely to be a valid address.
5392 (But it is not guaranteed to be one.)
5394 Note that at most one register is replaced, even if more are
5395 replaceable. Also, we try to put the result into a canonical form
5396 so it is more likely to be a valid address.
5398 In all other cases, return ADDR. */
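/* Sketch of the transformation (the register number is hypothetical): if
   pseudo 100 received no hard register and is equivalent to
   (symbol_ref "buf"), then (plus (reg 100) (const_int 8)) becomes
   (const (plus (symbol_ref "buf") (const_int 8))) after the replacement
   and the re-association performed through form_sum.  */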
5400 static rtx
5401 subst_indexed_address (rtx addr)
5403 rtx op0 = 0, op1 = 0, op2 = 0;
5404 rtx tem;
5405 int regno;
5407 if (GET_CODE (addr) == PLUS)
5409 /* Try to find a register to replace. */
5410 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5411 if (REG_P (op0)
5412 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5413 && reg_renumber[regno] < 0
5414 && reg_equiv_constant (regno) != 0)
5415 op0 = reg_equiv_constant (regno);
5416 else if (REG_P (op1)
5417 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5418 && reg_renumber[regno] < 0
5419 && reg_equiv_constant (regno) != 0)
5420 op1 = reg_equiv_constant (regno);
5421 else if (GET_CODE (op0) == PLUS
5422 && (tem = subst_indexed_address (op0)) != op0)
5423 op0 = tem;
5424 else if (GET_CODE (op1) == PLUS
5425 && (tem = subst_indexed_address (op1)) != op1)
5426 op1 = tem;
5427 else
5428 return addr;
5430 /* Pick out up to three things to add. */
5431 if (GET_CODE (op1) == PLUS)
5432 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5433 else if (GET_CODE (op0) == PLUS)
5434 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5436 /* Compute the sum. */
5437 if (op2 != 0)
5438 op1 = form_sum (GET_MODE (addr), op1, op2);
5439 if (op1 != 0)
5440 op0 = form_sum (GET_MODE (addr), op0, op1);
5442 return op0;
5444 return addr;
5447 /* Update the REG_INC notes for an insn. It updates all REG_INC
5448 notes for the instruction which refer to REGNO so that they
5449 refer instead to the reload number.
5451 INSN is the insn for which any REG_INC notes need updating.
5453 REGNO is the register number which has been reloaded.
5455 RELOADNUM is the reload number. */
5457 static void
5458 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5459 int reloadnum ATTRIBUTE_UNUSED)
5461 #ifdef AUTO_INC_DEC
5462 rtx link;
5464 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5465 if (REG_NOTE_KIND (link) == REG_INC
5466 && (int) REGNO (XEXP (link, 0)) == regno)
5467 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5468 #endif
5471 /* Record the pseudo registers we must reload into hard registers in a
5472 subexpression of a would-be memory address, X referring to a value
5473 in mode MODE. (This function is not called if the address we find
5474 is strictly valid.)
5476 CONTEXT = 1 means we are considering regs as index regs,
5477 = 0 means we are considering them as base regs.
5478 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5479 or an autoinc code.
5480 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5481 is the code of the index part of the address. Otherwise, pass SCRATCH
5482 for this argument.
5483 OPNUM and TYPE specify the purpose of any reloads made.
5485 IND_LEVELS says how many levels of indirect addressing are
5486 supported at this point in the address.
5488 INSN, if nonzero, is the insn in which we do the reload. It is used
5489 to determine if we may generate output reloads.
5491 We return nonzero if X, as a whole, is reloaded or replaced. */
5493 /* Note that we take shortcuts assuming that no multi-reg machine mode
5494 occurs as part of an address.
5495 Also, this is not fully machine-customizable; it works for machines
5496 such as VAXen and 68000's and 32000's, but other possible machines
5497 could have addressing modes that this does not handle right.
5498 If you add push_reload calls here, you need to make sure gen_reload
5499 handles those cases gracefully. */
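/* For orientation (this example is not in the original source): in an
   address such as (plus (mult (reg I) (const_int 4)) (reg B)), the MULT
   operand is processed with CONTEXT == 1, i.e. against the index register
   class, and the other operand with CONTEXT == 0, i.e. against the base
   register class chosen by base_reg_class.  */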
5501 static int
5502 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5503 rtx x, int context,
5504 enum rtx_code outer_code, enum rtx_code index_code,
5505 rtx *loc, int opnum, enum reload_type type,
5506 int ind_levels, rtx_insn *insn)
5508 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5509 ((CONTEXT) == 0 \
5510 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5511 : REGNO_OK_FOR_INDEX_P (REGNO))
5513 enum reg_class context_reg_class;
5514 RTX_CODE code = GET_CODE (x);
5515 bool reloaded_inner_of_autoinc = false;
5517 if (context == 1)
5518 context_reg_class = INDEX_REG_CLASS;
5519 else
5520 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5522 switch (code)
5524 case PLUS:
5526 rtx orig_op0 = XEXP (x, 0);
5527 rtx orig_op1 = XEXP (x, 1);
5528 RTX_CODE code0 = GET_CODE (orig_op0);
5529 RTX_CODE code1 = GET_CODE (orig_op1);
5530 rtx op0 = orig_op0;
5531 rtx op1 = orig_op1;
5533 if (GET_CODE (op0) == SUBREG)
5535 op0 = SUBREG_REG (op0);
5536 code0 = GET_CODE (op0);
5537 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5538 op0 = gen_rtx_REG (word_mode,
5539 (REGNO (op0) +
5540 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5541 GET_MODE (SUBREG_REG (orig_op0)),
5542 SUBREG_BYTE (orig_op0),
5543 GET_MODE (orig_op0))));
5546 if (GET_CODE (op1) == SUBREG)
5548 op1 = SUBREG_REG (op1);
5549 code1 = GET_CODE (op1);
5550 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5551 /* ??? Why is this given op1's mode and above for
5552 ??? op0 SUBREGs we use word_mode? */
5553 op1 = gen_rtx_REG (GET_MODE (op1),
5554 (REGNO (op1) +
5555 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5556 GET_MODE (SUBREG_REG (orig_op1)),
5557 SUBREG_BYTE (orig_op1),
5558 GET_MODE (orig_op1))));
5560 /* A PLUS in the index register position may be created only as a result of
5561 register rematerialization for an expression like &localvar*4. Reload it.
5562 It may be possible to combine the displacement on the outer level,
5563 but it is probably not worthwhile to do so. */
5564 if (context == 1)
5566 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5567 opnum, ADDR_TYPE (type), ind_levels, insn);
5568 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5569 context_reg_class,
5570 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5571 return 1;
5574 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5575 || code0 == ZERO_EXTEND || code1 == MEM)
5577 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5578 &XEXP (x, 0), opnum, type, ind_levels,
5579 insn);
5580 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5581 &XEXP (x, 1), opnum, type, ind_levels,
5582 insn);
5585 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5586 || code1 == ZERO_EXTEND || code0 == MEM)
5588 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5589 &XEXP (x, 0), opnum, type, ind_levels,
5590 insn);
5591 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5592 &XEXP (x, 1), opnum, type, ind_levels,
5593 insn);
5596 else if (code0 == CONST_INT || code0 == CONST
5597 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5598 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5599 &XEXP (x, 1), opnum, type, ind_levels,
5600 insn);
5602 else if (code1 == CONST_INT || code1 == CONST
5603 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5604 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5605 &XEXP (x, 0), opnum, type, ind_levels,
5606 insn);
5608 else if (code0 == REG && code1 == REG)
5610 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5611 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5612 return 0;
5613 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5614 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5615 return 0;
5616 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5617 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5618 &XEXP (x, 1), opnum, type, ind_levels,
5619 insn);
5620 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5621 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5622 &XEXP (x, 0), opnum, type, ind_levels,
5623 insn);
5624 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5625 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5626 &XEXP (x, 0), opnum, type, ind_levels,
5627 insn);
5628 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5629 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5630 &XEXP (x, 1), opnum, type, ind_levels,
5631 insn);
5632 else
5634 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5635 &XEXP (x, 0), opnum, type, ind_levels,
5636 insn);
5637 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5638 &XEXP (x, 1), opnum, type, ind_levels,
5639 insn);
5643 else if (code0 == REG)
5645 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5646 &XEXP (x, 0), opnum, type, ind_levels,
5647 insn);
5648 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5649 &XEXP (x, 1), opnum, type, ind_levels,
5650 insn);
5653 else if (code1 == REG)
5655 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5656 &XEXP (x, 1), opnum, type, ind_levels,
5657 insn);
5658 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5659 &XEXP (x, 0), opnum, type, ind_levels,
5660 insn);
5664 return 0;
5666 case POST_MODIFY:
5667 case PRE_MODIFY:
5669 rtx op0 = XEXP (x, 0);
5670 rtx op1 = XEXP (x, 1);
5671 enum rtx_code index_code;
5672 int regno;
5673 int reloadnum;
5675 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5676 return 0;
5678 /* Currently, we only support {PRE,POST}_MODIFY constructs
5679 where a base register is {inc,dec}remented by the contents
5680 of another register or by a constant value. Thus, these
5681 operands must match. */
5682 gcc_assert (op0 == XEXP (op1, 0));
5684 /* Require index register (or constant). Let's just handle the
5685 register case in the meantime... If the target allows
5686 auto-modify by a constant then we could try replacing a pseudo
5687 register with its equivalent constant where applicable.
5689 We also handle the case where the register was eliminated
5690 resulting in a PLUS subexpression.
5692 If we later decide to reload the whole PRE_MODIFY or
5693 POST_MODIFY, inc_for_reload might clobber the reload register
5694 before reading the index. The index register might therefore
5695 need to live longer than a TYPE reload normally would, so be
5696 conservative and class it as RELOAD_OTHER. */
5697 if ((REG_P (XEXP (op1, 1))
5698 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5699 || GET_CODE (XEXP (op1, 1)) == PLUS)
5700 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5701 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5702 ind_levels, insn);
5704 gcc_assert (REG_P (XEXP (op1, 0)));
5706 regno = REGNO (XEXP (op1, 0));
5707 index_code = GET_CODE (XEXP (op1, 1));
5709 /* A register that is incremented cannot be constant! */
5710 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5711 || reg_equiv_constant (regno) == 0);
5713 /* Handle a register that is equivalent to a memory location
5714 which cannot be addressed directly. */
5715 if (reg_equiv_memory_loc (regno) != 0
5716 && (reg_equiv_address (regno) != 0
5717 || num_not_at_initial_offset))
5719 rtx tem = make_memloc (XEXP (x, 0), regno);
5721 if (reg_equiv_address (regno)
5722 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5724 rtx orig = tem;
5726 /* First reload the memory location's address.
5727 We can't use ADDR_TYPE (type) here, because we need to
5728 write back the value after reading it, hence we actually
5729 need two registers. */
5730 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5731 &XEXP (tem, 0), opnum,
5732 RELOAD_OTHER,
5733 ind_levels, insn);
5735 if (!rtx_equal_p (tem, orig))
5736 push_reg_equiv_alt_mem (regno, tem);
5738 /* Then reload the memory location into a base
5739 register. */
5740 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5741 &XEXP (op1, 0),
5742 base_reg_class (mode, as,
5743 code, index_code),
5744 GET_MODE (x), GET_MODE (x), 0,
5745 0, opnum, RELOAD_OTHER);
5747 update_auto_inc_notes (this_insn, regno, reloadnum);
5748 return 0;
5752 if (reg_renumber[regno] >= 0)
5753 regno = reg_renumber[regno];
5755 /* We require a base register here... */
5756 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5758 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5759 &XEXP (op1, 0), &XEXP (x, 0),
5760 base_reg_class (mode, as,
5761 code, index_code),
5762 GET_MODE (x), GET_MODE (x), 0, 0,
5763 opnum, RELOAD_OTHER);
5765 update_auto_inc_notes (this_insn, regno, reloadnum);
5766 return 0;
5769 return 0;
5771 case POST_INC:
5772 case POST_DEC:
5773 case PRE_INC:
5774 case PRE_DEC:
5775 if (REG_P (XEXP (x, 0)))
5777 int regno = REGNO (XEXP (x, 0));
5778 int value = 0;
5779 rtx x_orig = x;
5781 /* A register that is incremented cannot be constant! */
5782 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5783 || reg_equiv_constant (regno) == 0);
5785 /* Handle a register that is equivalent to a memory location
5786 which cannot be addressed directly. */
5787 if (reg_equiv_memory_loc (regno) != 0
5788 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5790 rtx tem = make_memloc (XEXP (x, 0), regno);
5791 if (reg_equiv_address (regno)
5792 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5794 rtx orig = tem;
5796 /* First reload the memory location's address.
5797 We can't use ADDR_TYPE (type) here, because we need to
5798 write back the value after reading it, hence we actually
5799 need two registers. */
5800 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5801 &XEXP (tem, 0), opnum, type,
5802 ind_levels, insn);
5803 reloaded_inner_of_autoinc = true;
5804 if (!rtx_equal_p (tem, orig))
5805 push_reg_equiv_alt_mem (regno, tem);
5806 /* Put this inside a new increment-expression. */
5807 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5808 /* Proceed to reload that, as if it contained a register. */
5812 /* If we have a hard register that is ok in this incdec context,
5813 don't make a reload. If the register isn't nice enough for
5814 autoincdec, we can reload it. But if an autoincrement of a
5815 register that we have just verified as acceptable is still not
5816 "valid" in the enclosing context, then no autoincrement is "valid".
5817 If that is true and something made an autoincrement anyway,
5818 this must be a special context where one is allowed.
5819 (For example, a "push" instruction.)
5820 We can't improve this address, so leave it alone. */
5822 /* Otherwise, reload the autoincrement into a suitable hard reg
5823 and record how much to increment by. */
5825 if (reg_renumber[regno] >= 0)
5826 regno = reg_renumber[regno];
5827 if (regno >= FIRST_PSEUDO_REGISTER
5828 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5829 index_code))
5831 int reloadnum;
5833 /* If we can output the register afterwards, do so; this
5834 saves the extra update.
5835 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5836 CALL_INSN - and it does not set CC0.
5837 But don't do this if we cannot directly address the
5838 memory location, since this will make it harder to
5839 reuse address reloads, and increases register pressure.
5840 Also don't do this if we can probably update x directly. */
5841 rtx equiv = (MEM_P (XEXP (x, 0))
5842 ? XEXP (x, 0)
5843 : reg_equiv_mem (regno));
5844 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5845 if (insn && NONJUMP_INSN_P (insn) && equiv
5846 && memory_operand (equiv, GET_MODE (equiv))
5847 #ifdef HAVE_cc0
5848 && ! sets_cc0_p (PATTERN (insn))
5849 #endif
5850 && ! (icode != CODE_FOR_nothing
5851 && insn_operand_matches (icode, 0, equiv)
5852 && insn_operand_matches (icode, 1, equiv))
5853 /* Using RELOAD_OTHER means we emit this and the reload we
5854 made earlier in the wrong order. */
5855 && !reloaded_inner_of_autoinc)
5857 /* We use the original pseudo for loc, so that
5858 emit_reload_insns() knows which pseudo this
5859 reload refers to and updates the pseudo rtx, not
5860 its equivalent memory location, as well as the
5861 corresponding entry in reg_last_reload_reg. */
5862 loc = &XEXP (x_orig, 0);
5863 x = XEXP (x, 0);
5864 reloadnum
5865 = push_reload (x, x, loc, loc,
5866 context_reg_class,
5867 GET_MODE (x), GET_MODE (x), 0, 0,
5868 opnum, RELOAD_OTHER);
5870 else
5872 reloadnum
5873 = push_reload (x, x, loc, (rtx*) 0,
5874 context_reg_class,
5875 GET_MODE (x), GET_MODE (x), 0, 0,
5876 opnum, type);
5877 rld[reloadnum].inc
5878 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5880 value = 1;
5883 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5884 reloadnum);
5886 return value;
5888 return 0;
5890 case TRUNCATE:
5891 case SIGN_EXTEND:
5892 case ZERO_EXTEND:
5893 /* Look for parts to reload in the inner expression and reload them
5894 too, in addition to this operation. Reloading all inner parts in
5895 addition to this one shouldn't be necessary, but at this point,
5896 we don't know if we can possibly omit any part that *can* be
5897 reloaded. Targets that are better off reloading just either part
5898 (or perhaps even a different part of an outer expression), should
5899 define LEGITIMIZE_RELOAD_ADDRESS. */
5900 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5901 context, code, SCRATCH, &XEXP (x, 0), opnum,
5902 type, ind_levels, insn);
5903 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5904 context_reg_class,
5905 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5906 return 1;
5908 case MEM:
5909 /* This is probably the result of a substitution, by eliminate_regs, of
5910 an equivalent address for a pseudo that was not allocated to a hard
5911 register. Verify that the specified address is valid and reload it
5912 into a register.
5914 Since we know we are going to reload this item, don't decrement for
5915 the indirection level.
5917 Note that this is actually conservative: it would be slightly more
5918 efficient to use the value of SPILL_INDIRECT_LEVELS from
5919 reload1.c here. */
5921 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5922 opnum, ADDR_TYPE (type), ind_levels, insn);
5923 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5924 context_reg_class,
5925 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5926 return 1;
5928 case REG:
5930 int regno = REGNO (x);
5932 if (reg_equiv_constant (regno) != 0)
5934 find_reloads_address_part (reg_equiv_constant (regno), loc,
5935 context_reg_class,
5936 GET_MODE (x), opnum, type, ind_levels);
5937 return 1;
5940 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5941 that feeds this insn. */
5942 if (reg_equiv_mem (regno) != 0)
5944 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5945 context_reg_class,
5946 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5947 return 1;
5949 #endif
5951 if (reg_equiv_memory_loc (regno)
5952 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5954 rtx tem = make_memloc (x, regno);
5955 if (reg_equiv_address (regno) != 0
5956 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5958 x = tem;
5959 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5960 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5961 ind_levels, insn);
5962 if (!rtx_equal_p (x, tem))
5963 push_reg_equiv_alt_mem (regno, x);
5967 if (reg_renumber[regno] >= 0)
5968 regno = reg_renumber[regno];
5970 if (regno >= FIRST_PSEUDO_REGISTER
5971 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5972 index_code))
5974 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5975 context_reg_class,
5976 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5977 return 1;
5980 /* If a register appearing in an address is the subject of a CLOBBER
5981 in this insn, reload it into some other register to be safe.
5982 The CLOBBER is supposed to make the register unavailable
5983 from before this insn to after it. */
5984 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5986 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5987 context_reg_class,
5988 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5989 return 1;
5992 return 0;
5994 case SUBREG:
5995 if (REG_P (SUBREG_REG (x)))
5997 /* If this is a SUBREG of a hard register and the resulting register
5998 is of the wrong class, reload the whole SUBREG. This avoids
5999 needless copies if SUBREG_REG is multi-word. */
6000 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6002 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6004 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6005 index_code))
6007 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6008 context_reg_class,
6009 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6010 return 1;
6013 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6014 is larger than the class size, then reload the whole SUBREG. */
6015 else
6017 enum reg_class rclass = context_reg_class;
6018 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6019 > reg_class_size[(int) rclass])
6021 /* If the inner register will be replaced by a memory
6022 reference, we can do this only if we can replace the
6023 whole subreg by a (narrower) memory reference. If
6024 this is not possible, fall through and reload just
6025 the inner register (including address reloads). */
6026 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6028 rtx tem = find_reloads_subreg_address (x, opnum,
6029 ADDR_TYPE (type),
6030 ind_levels, insn,
6031 NULL);
6032 if (tem)
6034 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6035 GET_MODE (tem), VOIDmode, 0, 0,
6036 opnum, type);
6037 return 1;
6040 else
6042 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6043 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6044 return 1;
6049 break;
6051 default:
6052 break;
6056 const char *fmt = GET_RTX_FORMAT (code);
6057 int i;
6059 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6061 if (fmt[i] == 'e')
6062 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6063 we get here. */
6064 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6065 code, SCRATCH, &XEXP (x, i),
6066 opnum, type, ind_levels, insn);
6070 #undef REG_OK_FOR_CONTEXT
6071 return 0;
6074 /* X, which is found at *LOC, is a part of an address that needs to be
6075 reloaded into a register of class RCLASS. If X is a constant, or if
6076 X is a PLUS that contains a constant, check that the constant is a
6077 legitimate operand and that we are supposed to be able to load
6078 it into the register.
6080 If not, force the constant into memory and reload the MEM instead.
6082 MODE is the mode to use, in case X is an integer constant.
6084 OPNUM and TYPE describe the purpose of any reloads made.
6086 IND_LEVELS says how many levels of indirect addressing this machine
6087 supports. */
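/* Rough sketch of the effect, which is target-dependent and therefore only
   illustrative: when X is a constant that the target cannot load directly
   into RCLASS, it is forced into the constant pool by force_const_mem, the
   address of the resulting MEM is processed by find_reloads_address, and
   that MEM rather than the bare constant is what gets reloaded.  */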
6089 static void
6090 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6091 enum machine_mode mode, int opnum,
6092 enum reload_type type, int ind_levels)
6094 if (CONSTANT_P (x)
6095 && (!targetm.legitimate_constant_p (mode, x)
6096 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6098 x = force_const_mem (mode, x);
6099 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6100 opnum, type, ind_levels, 0);
6103 else if (GET_CODE (x) == PLUS
6104 && CONSTANT_P (XEXP (x, 1))
6105 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6106 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6107 == NO_REGS))
6109 rtx tem;
6111 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6112 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6113 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6114 opnum, type, ind_levels, 0);
6117 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6118 mode, VOIDmode, 0, 0, opnum, type);
6121 /* X, a subreg of a pseudo, is a part of an address that needs to be
6122 reloaded, and the pseudo is equivalent to a memory location.
6124 Attempt to replace the whole subreg by a (possibly narrower or wider)
6125 memory reference. If this is possible, return this new memory
6126 reference, and push all required address reloads. Otherwise,
6127 return NULL.
6129 OPNUM and TYPE identify the purpose of the reload.
6131 IND_LEVELS says how many levels of indirect addressing are
6132 supported at this point in the address.
6134 INSN, if nonzero, is the insn in which we do the reload. It is used
6135 to determine where to put USEs for pseudos that we have to replace with
6136 stack slots. */
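/* Example with hypothetical modes and offsets: for (subreg:HI (reg:SI 100) 2)
   where pseudo 100 is equivalent to (mem:SI (plus (reg fp) (const_int -8))),
   simplify_subreg below produces (mem:HI (plus (reg fp) (const_int -6))),
   which replaces the whole SUBREG; any reloads that the new address needs
   are then pushed.  */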
6138 static rtx
6139 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6140 int ind_levels, rtx_insn *insn,
6141 int *address_reloaded)
6143 enum machine_mode outer_mode = GET_MODE (x);
6144 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6145 int regno = REGNO (SUBREG_REG (x));
6146 int reloaded = 0;
6147 rtx tem, orig;
6148 int offset;
6150 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6152 /* We cannot replace the subreg with a modified memory reference if:
6154 - we have a paradoxical subreg that implicitly acts as a zero or
6155 sign extension operation due to LOAD_EXTEND_OP;
6157 - we have a subreg that is implicitly supposed to act on the full
6158 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6160 - the address of the equivalent memory location is mode-dependent; or
6162 - we have a paradoxical subreg and the resulting memory is not
6163 sufficiently aligned to allow access in the wider mode.
6165 In addition, we choose not to perform the replacement for *any*
6166 paradoxical subreg, even if it were possible in principle. This
6167 is to avoid generating wider memory references than necessary.
6169 This corresponds to how previous versions of reload used to handle
6170 paradoxical subregs where no address reload was required. */
6172 if (paradoxical_subreg_p (x))
6173 return NULL;
6175 #ifdef WORD_REGISTER_OPERATIONS
6176 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6177 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6178 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6179 return NULL;
6180 #endif
6182 /* Since we don't attempt to handle paradoxical subregs, we can just
6183 call into simplify_subreg, which will handle all remaining checks
6184 for us. */
6185 orig = make_memloc (SUBREG_REG (x), regno);
6186 offset = SUBREG_BYTE (x);
6187 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6188 if (!tem || !MEM_P (tem))
6189 return NULL;
6191 /* Now push all required address reloads, if any. */
6192 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6193 XEXP (tem, 0), &XEXP (tem, 0),
6194 opnum, type, ind_levels, insn);
6195 /* ??? Do we need to handle nonzero offsets somehow? */
6196 if (!offset && !rtx_equal_p (tem, orig))
6197 push_reg_equiv_alt_mem (regno, tem);
6199 /* For some processors an address may be valid in the original mode but
6200 not in a smaller mode. For example, ARM accepts a scaled index register
6201 in SImode but not in HImode. Note that this is only a problem if the
6202 address in reg_equiv_mem is already invalid in the new mode; other
6203 cases would be fixed by find_reloads_address as usual.
6205 ??? We attempt to handle such cases here by doing an additional reload
6206 of the full address after the usual processing by find_reloads_address.
6207 Note that this may not work in the general case, but it seems to cover
6208 the cases where this situation currently occurs. A more general fix
6209 might be to reload the *value* instead of the address, but this would
6210 not be expected by the callers of this routine as-is.
6212 If find_reloads_address has already completely replaced the address, there
6213 is nothing further to do. */
6214 if (reloaded == 0
6215 && reg_equiv_mem (regno) != 0
6216 && !strict_memory_address_addr_space_p
6217 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6218 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6220 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6221 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6222 MEM, SCRATCH),
6223 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6224 reloaded = 1;
6227 /* If this is not a toplevel operand, find_reloads doesn't see this
6228 substitution. We have to emit a USE of the pseudo so that
6229 delete_output_reload can see it. */
6230 if (replace_reloads && recog_data.operand[opnum] != x)
6231 /* We mark the USE with QImode so that we recognize it as one that
6232 can be safely deleted at the end of reload. */
6233 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6234 QImode);
6236 if (address_reloaded)
6237 *address_reloaded = reloaded;
6239 return tem;
6242 /* Substitute into the current INSN the registers into which we have reloaded
6243 the things that need reloading. The array `replacements'
6244 contains the locations of all pointers that must be changed
6245 and says what to replace them with.
6247 All replacements are applied in place within INSN; nothing is returned. */
6249 void
6250 subst_reloads (rtx_insn *insn)
6252 int i;
6254 for (i = 0; i < n_replacements; i++)
6256 struct replacement *r = &replacements[i];
6257 rtx reloadreg = rld[r->what].reg_rtx;
6258 if (reloadreg)
6260 #ifdef DEBUG_RELOAD
6261 /* This checking takes a very long time on some platforms
6262 causing the gcc.c-torture/compile/limits-fnargs.c test
6263 to time out during testing. See PR 31850.
6265 Internal consistency test. Check that we don't modify
6266 anything in the equivalence arrays. Whenever something from
6267 those arrays needs to be reloaded, it must be unshared before
6268 being substituted into; the equivalence must not be modified.
6269 Otherwise, if the equivalence is used after that, it will
6270 have been modified, and the thing substituted (probably a
6271 register) is likely overwritten and not a usable equivalence. */
6272 int check_regno;
6274 for (check_regno = 0; check_regno < max_regno; check_regno++)
6276 #define CHECK_MODF(ARRAY) \
6277 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6278 || !loc_mentioned_in_p (r->where, \
6279 (*reg_equivs)[check_regno].ARRAY))
6281 CHECK_MODF (constant);
6282 CHECK_MODF (memory_loc);
6283 CHECK_MODF (address);
6284 CHECK_MODF (mem);
6285 #undef CHECK_MODF
6287 #endif /* DEBUG_RELOAD */
6289 /* If we're replacing a LABEL_REF with a register, there must
6290 already be an indication (to e.g. flow) which label this
6291 register refers to. */
6292 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6293 || !JUMP_P (insn)
6294 || find_reg_note (insn,
6295 REG_LABEL_OPERAND,
6296 XEXP (*r->where, 0))
6297 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6299 /* Encapsulate RELOADREG so its machine mode matches what
6300 used to be there. Note that gen_lowpart_common will
6301 do the wrong thing if RELOADREG is multi-word. RELOADREG
6302 will always be a REG here. */
6303 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6304 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6306 *r->where = reloadreg;
6308 /* If reload got no reg and isn't optional, something's wrong. */
6309 else
6310 gcc_assert (rld[r->what].optional);
6314 /* Make a copy of any replacements being done into X and move those
6315 copies to locations in Y, a copy of X. */
6317 void
6318 copy_replacements (rtx x, rtx y)
6320 copy_replacements_1 (&x, &y, n_replacements);
6323 static void
6324 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6326 int i, j;
6327 rtx x, y;
6328 struct replacement *r;
6329 enum rtx_code code;
6330 const char *fmt;
6332 for (j = 0; j < orig_replacements; j++)
6333 if (replacements[j].where == px)
6335 r = &replacements[n_replacements++];
6336 r->where = py;
6337 r->what = replacements[j].what;
6338 r->mode = replacements[j].mode;
6341 x = *px;
6342 y = *py;
6343 code = GET_CODE (x);
6344 fmt = GET_RTX_FORMAT (code);
6346 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6348 if (fmt[i] == 'e')
6349 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6350 else if (fmt[i] == 'E')
6351 for (j = XVECLEN (x, i); --j >= 0; )
6352 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6353 orig_replacements);
6357 /* Change any replacements being done to *X to be done to *Y. */
6359 void
6360 move_replacements (rtx *x, rtx *y)
6362 int i;
6364 for (i = 0; i < n_replacements; i++)
6365 if (replacements[i].where == x)
6366 replacements[i].where = y;
6369 /* If LOC was scheduled to be replaced by something, return the replacement.
6370 Otherwise, return *LOC. */
6372 rtx
6373 find_replacement (rtx *loc)
6375 struct replacement *r;
6377 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6379 rtx reloadreg = rld[r->what].reg_rtx;
6381 if (reloadreg && r->where == loc)
6383 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6384 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6386 return reloadreg;
6388 else if (reloadreg && GET_CODE (*loc) == SUBREG
6389 && r->where == &SUBREG_REG (*loc))
6391 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6392 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6394 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6395 GET_MODE (SUBREG_REG (*loc)),
6396 SUBREG_BYTE (*loc));
6400 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6401 what's inside and make a new rtl if so. */
6402 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6403 || GET_CODE (*loc) == MULT)
6405 rtx x = find_replacement (&XEXP (*loc, 0));
6406 rtx y = find_replacement (&XEXP (*loc, 1));
6408 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6409 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6412 return *loc;
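/* Note on the PLUS/MINUS/MULT case above (not from the original source):
   for an operand like (plus (reg 100) (const_int 8)) where a replacement
   is scheduled only for the inner register, a fresh PLUS built from the
   replacement results is returned, so callers see the operand as it will
   look after subst_reloads without the original rtx being modified.  */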
6415 /* Return nonzero if register in range [REGNO, ENDREGNO)
6416 appears either explicitly or implicitly in X
6417 other than being stored into (except for earlyclobber operands).
6419 References contained within the substructure at LOC do not count.
6420 LOC may be zero, meaning don't ignore anything.
6422 This is similar to refers_to_regno_p in rtlanal.c except that we
6423 look at equivalences for pseudos that didn't get hard registers. */
6425 static int
6426 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6427 rtx x, rtx *loc)
6429 int i;
6430 unsigned int r;
6431 RTX_CODE code;
6432 const char *fmt;
6434 if (x == 0)
6435 return 0;
6437 repeat:
6438 code = GET_CODE (x);
6440 switch (code)
6442 case REG:
6443 r = REGNO (x);
6445 /* If this is a pseudo, a hard register must not have been allocated.
6446 X must therefore either be a constant or be in memory. */
6447 if (r >= FIRST_PSEUDO_REGISTER)
6449 if (reg_equiv_memory_loc (r))
6450 return refers_to_regno_for_reload_p (regno, endregno,
6451 reg_equiv_memory_loc (r),
6452 (rtx*) 0);
6454 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6455 return 0;
6458 return (endregno > r
6459 && regno < r + (r < FIRST_PSEUDO_REGISTER
6460 ? hard_regno_nregs[r][GET_MODE (x)]
6461 : 1));
6463 case SUBREG:
6464 /* If this is a SUBREG of a hard reg, we can see exactly which
6465 registers are being modified. Otherwise, handle normally. */
6466 if (REG_P (SUBREG_REG (x))
6467 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6469 unsigned int inner_regno = subreg_regno (x);
6470 unsigned int inner_endregno
6471 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6472 ? subreg_nregs (x) : 1);
6474 return endregno > inner_regno && regno < inner_endregno;
6476 break;
6478 case CLOBBER:
6479 case SET:
6480 if (&SET_DEST (x) != loc
6481 /* Note setting a SUBREG counts as referring to the REG it is in for
6482 a pseudo but not for hard registers since we can
6483 treat each word individually. */
6484 && ((GET_CODE (SET_DEST (x)) == SUBREG
6485 && loc != &SUBREG_REG (SET_DEST (x))
6486 && REG_P (SUBREG_REG (SET_DEST (x)))
6487 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6488 && refers_to_regno_for_reload_p (regno, endregno,
6489 SUBREG_REG (SET_DEST (x)),
6490 loc))
6491 /* If the output is an earlyclobber operand, this is
6492 a conflict. */
6493 || ((!REG_P (SET_DEST (x))
6494 || earlyclobber_operand_p (SET_DEST (x)))
6495 && refers_to_regno_for_reload_p (regno, endregno,
6496 SET_DEST (x), loc))))
6497 return 1;
6499 if (code == CLOBBER || loc == &SET_SRC (x))
6500 return 0;
6501 x = SET_SRC (x);
6502 goto repeat;
6504 default:
6505 break;
6508 /* X does not match, so try its subexpressions. */
6510 fmt = GET_RTX_FORMAT (code);
6511 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6513 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6515 if (i == 0)
6517 x = XEXP (x, 0);
6518 goto repeat;
6520 else
6521 if (refers_to_regno_for_reload_p (regno, endregno,
6522 XEXP (x, i), loc))
6523 return 1;
6525 else if (fmt[i] == 'E')
6527 int j;
6528 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6529 if (loc != &XVECEXP (x, i, j)
6530 && refers_to_regno_for_reload_p (regno, endregno,
6531 XVECEXP (x, i, j), loc))
6532 return 1;
6535 return 0;
6538 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6539 we check if any register number in X conflicts with the relevant register
6540 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6541 contains a MEM (we don't bother checking for memory addresses that can't
6542 conflict because we expect this to be a rare case).
6544 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6545 that we look at equivalences for pseudos that didn't get hard registers. */
6547 int
6548 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6550 int regno, endregno;
6552 /* Overly conservative. */
6553 if (GET_CODE (x) == STRICT_LOW_PART
6554 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6555 x = XEXP (x, 0);
6557 /* If either argument is a constant, then modifying X can not affect IN. */
6558 if (CONSTANT_P (x) || CONSTANT_P (in))
6559 return 0;
6560 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6561 return refers_to_mem_for_reload_p (in);
6562 else if (GET_CODE (x) == SUBREG)
6564 regno = REGNO (SUBREG_REG (x));
6565 if (regno < FIRST_PSEUDO_REGISTER)
6566 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6567 GET_MODE (SUBREG_REG (x)),
6568 SUBREG_BYTE (x),
6569 GET_MODE (x));
6570 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6571 ? subreg_nregs (x) : 1);
6573 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6575 else if (REG_P (x))
6577 regno = REGNO (x);
6579 /* If this is a pseudo, it must not have been assigned a hard register.
6580 Therefore, it must either be in memory or be a constant. */
6582 if (regno >= FIRST_PSEUDO_REGISTER)
6584 if (reg_equiv_memory_loc (regno))
6585 return refers_to_mem_for_reload_p (in);
6586 gcc_assert (reg_equiv_constant (regno));
6587 return 0;
6590 endregno = END_HARD_REGNO (x);
6592 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6594 else if (MEM_P (x))
6595 return refers_to_mem_for_reload_p (in);
6596 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6597 || GET_CODE (x) == CC0)
6598 return reg_mentioned_p (x, in);
6599 else
6601 gcc_assert (GET_CODE (x) == PLUS);
6603 /* We actually want to know if X is mentioned somewhere inside IN.
6604 We must not say that (plus (sp) (const_int 124)) is in
6605 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6606 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6607 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6608 while (MEM_P (in))
6609 in = XEXP (in, 0);
6610 if (REG_P (in))
6611 return 0;
6612 else if (GET_CODE (in) == PLUS)
6613 return (rtx_equal_p (x, in)
6614 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6615 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6616 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6617 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6620 gcc_unreachable ();
6623 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6624 registers. */
6626 static int
6627 refers_to_mem_for_reload_p (rtx x)
6629 const char *fmt;
6630 int i;
6632 if (MEM_P (x))
6633 return 1;
6635 if (REG_P (x))
6636 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6637 && reg_equiv_memory_loc (REGNO (x)));
6639 fmt = GET_RTX_FORMAT (GET_CODE (x));
6640 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6641 if (fmt[i] == 'e'
6642 && (MEM_P (XEXP (x, i))
6643 || refers_to_mem_for_reload_p (XEXP (x, i))))
6644 return 1;
6646 return 0;

/* Check the insns before INSN to see if there is a suitable register
   containing the same value as GOAL.
   If OTHER is -1, look for a register in class RCLASS.
   Otherwise, just see if register number OTHER shares GOAL's value.

   Return an rtx for the register found, or zero if none is found.

   If RELOAD_REG_P is (short *)1,
   we reject any hard reg that appears in reload_reg_rtx
   because such a hard reg is also needed coming into this insn.

   If RELOAD_REG_P is any other nonzero value,
   it is a vector indexed by hard reg number
   and we reject any hard reg whose element in the vector is nonnegative
   as well as any that appears in reload_reg_rtx.

   If GOAL is zero, then GOALREG is a register number; we look
   for an equivalent for that register.

   MODE is the machine mode of the value we want an equivalence for.
   If GOAL is nonzero and not VOIDmode, then it must have mode MODE.

   This function is used by jump.c as well as in the reload pass.

   If GOAL is the sum of the stack pointer and a constant, we treat it
   as if it were a constant except that sp is required to be unchanging.  */

rtx
find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
                short *reload_reg_p, int goalreg, enum machine_mode mode)
{
  rtx_insn *p = insn;
  rtx goaltry, valtry, value;
  rtx_insn *where;
  rtx pat;
  int regno = -1;
  int valueno;
  int goal_mem = 0;
  int goal_const = 0;
  int goal_mem_addr_varies = 0;
  int need_stable_sp = 0;
  int nregs;
  int valuenregs;
  int num = 0;

  if (goal == 0)
    regno = goalreg;
  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
    {
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
        return 0;
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
        return 0;
      /* An address with side effects must be reexecuted.  */
      switch (code)
        {
        case POST_INC:
        case PRE_INC:
        case POST_DEC:
        case PRE_DEC:
        case POST_MODIFY:
        case PRE_MODIFY:
          return 0;
        default:
          break;
        }
      goal_mem = 1;
    }
  else if (CONSTANT_P (goal))
    goal_const = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == stack_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == frame_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = 1;
  else
    return 0;

  num = 0;
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */

  while (1)
    {
      p = PREV_INSN (p);
      if (p && DEBUG_INSN_P (p))
        continue;
      num++;
      if (p == 0 || LABEL_P (p)
          || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
        return 0;

      /* Don't reuse register contents from before a setjmp-type
         function call; on the second return (from the longjmp) it
         might have been clobbered by a later reuse.  It doesn't
         seem worthwhile to go and see whether it is actually reused
         even if that information would be readily available;
         just don't reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
        return 0;

      if (NONJUMP_INSN_P (p)
          /* If we don't want spill regs ...  */
          && (! (reload_reg_p != 0
                 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
              /* ... then ignore insns introduced by reload; they aren't
                 useful and can cause results in reload_as_needed to be
                 different from what they were when calculating the need for
                 spills.  If we notice an input-reload insn here, we will
                 reject it below, but it might hide a usable equivalent.
                 That makes bad code.  It may even fail: perhaps no reg was
                 spilled for this insn because it was assumed we would find
                 that equivalent.  */
              || INSN_UID (p) < reload_first_uid))
        {
          rtx tem;
          pat = single_set (p);

          /* First check for something that sets some reg equal to GOAL.  */
          if (pat != 0
              && ((regno >= 0
                   && true_regnum (SET_SRC (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  ||
                  (regno >= 0
                   && true_regnum (SET_DEST (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
                  ||
                  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
                   /* When looking for stack pointer + const,
                      make sure we don't use a stack adjust.  */
                   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
                  /* If we are looking for a constant,
                     and something equivalent to that constant was copied
                     into a reg, we can use that reg.  */
                  || (goal_const && REG_NOTES (p) != 0
                      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
                      && ((rtx_equal_p (XEXP (tem, 0), goal)
                           && (valueno
                               = true_regnum (valtry = SET_DEST (pat))) >= 0)
                          || (REG_P (SET_DEST (pat))
                              && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                              && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                              && CONST_INT_P (goal)
                              && 0 != (goaltry
                                       = operand_subword (XEXP (tem, 0), 0, 0,
                                                          VOIDmode))
                              && rtx_equal_p (goal, goaltry)
                              && (valtry
                                  = operand_subword (SET_DEST (pat), 0, 0,
                                                     VOIDmode))
                              && (valueno = true_regnum (valtry)) >= 0)))
                  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
                                                          NULL_RTX))
                      && REG_P (SET_DEST (pat))
                      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                      && CONST_INT_P (goal)
                      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
                                                          VOIDmode))
                      && rtx_equal_p (goal, goaltry)
                      && (valtry
                          = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
                      && (valueno = true_regnum (valtry)) >= 0)))
            {
              if (other >= 0)
                {
                  if (valueno != other)
                    continue;
                }
              else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
                continue;
              else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
                                           mode, valueno))
                continue;
              value = valtry;
              where = p;
              break;
            }
        }
    }

  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
    return 0;

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
                                                          goal)))
    need_stable_sp = 1;

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)
    return 0;

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
                                       goal, (rtx*) 0))
    return 0;

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  else
    nregs = 1;
  valuenregs = hard_regno_nregs[valueno][mode];

  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)
    return 0;

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
    {
      int i;
      for (i = 0; i < valuenregs; ++i)
        if (reload_reg_p[valueno + i] >= 0)
          return 0;
    }

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
    {
      int i;
      for (i = 0; i < n_reloads; i++)
        if (rld[i].reg_rtx != 0 && rld[i].in)
          {
            int regno1 = REGNO (rld[i].reg_rtx);
            int nregs1 = hard_regno_nregs[regno1]
                                         [GET_MODE (rld[i].reg_rtx)];
            if (regno1 < valueno + valuenregs
                && regno1 + nregs1 > valueno)
              return 0;
          }
    }

  if (goal_mem)
    /* We must treat frame pointer as varying here,
       since it can vary--in a nonlocal goto as generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));

  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  */

  p = insn;
  while (1)
    {
      p = PREV_INSN (p);
      if (p == where)
        return value;

      /* Don't trust the conversion past a function call
         if either of the two is in a call-clobbered register, or memory.  */
      if (CALL_P (p))
        {
          int i;

          if (goal_mem || need_stable_sp)
            return 0;

          if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < nregs; ++i)
              if (call_used_regs[regno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
                return 0;

          if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < valuenregs; ++i)
              if (call_used_regs[valueno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
                return 0;
        }

      if (INSN_P (p))
        {
          pat = PATTERN (p);

          /* Watch out for unspec_volatile, and volatile asms.  */
          if (volatile_insn_p (pat))
            return 0;

          /* If this insn P stores in either GOAL or VALUE, return 0.
             If GOAL is a memory ref and this insn writes memory, return 0.
             If GOAL is a memory ref and its address is not constant,
             and this insn P changes a register used in GOAL, return 0.  */

          if (GET_CODE (pat) == COND_EXEC)
            pat = COND_EXEC_CODE (pat);
          if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
            {
              rtx dest = SET_DEST (pat);
              while (GET_CODE (dest) == SUBREG
                     || GET_CODE (dest) == ZERO_EXTRACT
                     || GET_CODE (dest) == STRICT_LOW_PART)
                dest = XEXP (dest, 0);
              if (REG_P (dest))
                {
                  int xregno = REGNO (dest);
                  int xnregs;
                  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                  else
                    xnregs = 1;
                  if (xregno < regno + nregs && xregno + xnregs > regno)
                    return 0;
                  if (xregno < valueno + valuenregs
                      && xregno + xnregs > valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (dest, goal))
                    return 0;
                  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                    return 0;
                }
              else if (goal_mem && MEM_P (dest)
                       && ! push_operand (dest, GET_MODE (dest)))
                return 0;
              else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                       && reg_equiv_memory_loc (regno) != 0)
                return 0;
              else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
                return 0;
            }
          else if (GET_CODE (pat) == PARALLEL)
            {
              int i;
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  rtx v1 = XVECEXP (pat, 0, i);
                  if (GET_CODE (v1) == COND_EXEC)
                    v1 = COND_EXEC_CODE (v1);
                  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
                    {
                      rtx dest = SET_DEST (v1);
                      while (GET_CODE (dest) == SUBREG
                             || GET_CODE (dest) == ZERO_EXTRACT
                             || GET_CODE (dest) == STRICT_LOW_PART)
                        dest = XEXP (dest, 0);
                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs;
                          if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                            xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                          else
                            xnregs = 1;
                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          if (xregno < valueno + valuenregs
                              && xregno + xnregs > valueno)
                            return 0;
                          if (goal_mem_addr_varies
                              && reg_overlap_mentioned_for_reload_p (dest,
                                                                     goal))
                            return 0;
                          if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                            return 0;
                        }
                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                               && reg_equiv_memory_loc (regno) != 0)
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

          if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
            {
              rtx link;

              for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
                   link = XEXP (link, 1))
                {
                  pat = XEXP (link, 0);
                  if (GET_CODE (pat) == CLOBBER)
                    {
                      rtx dest = SET_DEST (pat);

                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs
                            = hard_regno_nregs[xregno][GET_MODE (dest)];

                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          else if (xregno < valueno + valuenregs
                                   && xregno + xnregs > valueno)
                            return 0;
                          else if (goal_mem_addr_varies
                                   && reg_overlap_mentioned_for_reload_p (dest,
                                                                          goal))
                            return 0;
                        }

                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

#ifdef AUTO_INC_DEC
          /* If this insn auto-increments or auto-decrements
             either regno or valueno, return 0 now.
             If GOAL is a memory ref and its address is not constant,
             and this insn P increments a register used in GOAL, return 0.  */
          {
            rtx link;

            for (link = REG_NOTES (p); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_INC
                  && REG_P (XEXP (link, 0)))
                {
                  int incno = REGNO (XEXP (link, 0));
                  if (incno < regno + nregs && incno >= regno)
                    return 0;
                  if (incno < valueno + valuenregs && incno >= valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
                                                             goal))
                    return 0;
                }
          }
#endif
        }
    }
}
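
/* A hypothetical call, for illustration only: ask whether some earlier insn
   left the value of GOAL in a still-live GENERAL_REGS hard register, with
   no particular register required (OTHER == -1) and no spill registers
   excluded (RELOAD_REG_P == 0):

     rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                                 (short *) 0, 0, GET_MODE (goal));

   A nonzero result is a hard REG rtx in the requested mode whose contents
   are known to equal GOAL at INSN, so the caller may reuse it instead of
   emitting a fresh load.  */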

/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

static int
find_inc_amount (rtx x, rtx inced)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i;

  if (code == MEM)
    {
      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
           || GET_CODE (addr) == POST_DEC
           || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_INC)
          && XEXP (addr, 0) == inced)
        return GET_MODE_SIZE (GET_MODE (x));
      else if ((GET_CODE (addr) == PRE_MODIFY
                || GET_CODE (addr) == POST_MODIFY)
               && GET_CODE (XEXP (addr, 1)) == PLUS
               && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
               && XEXP (addr, 0) == inced
               && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
        {
          i = INTVAL (XEXP (XEXP (addr, 1), 1));
          return i < 0 ? -i : i;
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          int tem = find_inc_amount (XEXP (x, i), inced);
          if (tem != 0)
            return tem;
        }
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              int tem = find_inc_amount (XVECEXP (x, i, j), inced);
              if (tem != 0)
                return tem;
            }
        }
    }

  return 0;
}
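
/* Illustrative examples: for X = (mem:SI (post_inc:SI (reg:SI 3))) and
   INCED = (reg:SI 3), the MEM case above returns GET_MODE_SIZE (SImode),
   i.e. 4 on a typical 32-bit target; for a (pre_modify (reg 3)
   (plus (reg 3) (const_int -8))) address it returns 8, the amount always
   being reported as a positive number regardless of direction.  */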

/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

#ifdef AUTO_INC_DEC
static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
                           rtx insn)
{
  rtx link;

  gcc_assert (insn);

  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        unsigned int test = (int) REGNO (XEXP (link, 0));
        if (test >= regno && test < endregno)
          return 1;
      }
  return 0;
}

#else

#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

#endif

/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx_insn *insn, enum machine_mode mode,
                   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

      for (; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          if ((GET_CODE (elt) == CLOBBER
               || (sets == 1 && GET_CODE (elt) == SET))
              && REG_P (XEXP (elt, 0)))
            {
              unsigned int test = REGNO (XEXP (elt, 0));

              if (test >= regno && test < endregno)
                return 1;
            }
          if (sets == 2
              && reg_inc_found_and_valid_p (regno, endregno, elt))
            return 1;
        }
    }

  return 0;
}
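
/* For illustration, with a hypothetical INSN whose pattern is
   (parallel [(set (reg:SI 0) ...) (clobber (reg:SI 1))]):

     regno_clobbered_p (1, insn, SImode, 0)   returns 1, via the PARALLEL
                                              walk over the CLOBBER;
     regno_clobbered_p (0, insn, SImode, 0)   returns 0, since plain SETs
                                              are only considered when
                                              SETS == 1.  */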

/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
rtx
reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
{
  int regno;

  if (GET_MODE (reloadreg) == mode)
    return reloadreg;

  regno = REGNO (reloadreg);

  if (REG_WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
             - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
}
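
/* Example, assuming a hypothetical target where REG_WORDS_BIG_ENDIAN holds
   and hard registers 10 and 11 hold a DImode value as a pair:

     rtx low = reload_adjust_reg_for_mode (gen_rtx_REG (DImode, 10), SImode);

   yields (reg:SI 11), the register holding the low-order word under that
   ordering; with !REG_WORDS_BIG_ENDIAN it would simply be (reg:SI 10).  */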

static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};

/* These functions are used to print the variables set by `find_reloads'.  */

DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
        {
          fprintf (f, "reload_in (%s) = ",
                   GET_MODE_NAME (rld[r].inmode));
          print_inline_rtx (f, rld[r].in, 24);
          fprintf (f, "\n\t");
        }

      if (rld[r].out != 0)
        {
          fprintf (f, "reload_out (%s) = ",
                   GET_MODE_NAME (rld[r].outmode));
          print_inline_rtx (f, rld[r].out, 24);
          fprintf (f, "\n\t");
        }

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
               reload_when_needed_name[(int) rld[r].when_needed],
               rld[r].opnum);

      if (rld[r].optional)
        fprintf (f, ", optional");

      if (rld[r].nongroup)
        fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
        fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
        fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
        fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
        {
          fprintf (f, "\n\treload_in_reg: ");
          print_inline_rtx (f, rld[r].in_reg, 24);
        }

      if (rld[r].out_reg != 0)
        {
          fprintf (f, "\n\treload_out_reg: ");
          print_inline_rtx (f, rld[r].out_reg, 24);
        }

      if (rld[r].reg_rtx != 0)
        {
          fprintf (f, "\n\treload_reg_rtx: ");
          print_inline_rtx (f, rld[r].reg_rtx, 24);
        }

      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
        {
          fprintf (f, "%ssecondary_in_reload = %d",
                   prefix, rld[r].secondary_in_reload);
          prefix = ", ";
        }

      if (rld[r].secondary_out_reload != -1)
        fprintf (f, "%ssecondary_out_reload = %d\n",
                 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
        {
          fprintf (f, "%ssecondary_in_icode = %s", prefix,
                   insn_data[rld[r].secondary_in_icode].name);
          prefix = ", ";
        }

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
        fprintf (f, "%ssecondary_out_icode = %s", prefix,
                 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}

DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);
}
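
/* Both of the above are convenience entry points meant to be invoked by
   hand from a debugger, e.g. "call debug_reload ()" under gdb while stopped
   inside find_reloads, to dump the reloads currently recorded in rld[].  */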