/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains subroutines used only from the file reload1.c.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally record the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.

   NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better that way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

   1 happens every time find_reloads is called.
   2 happens only when REPLACE is 1, which is only when
   actually doing the reloads, not when just counting them.

   Using a reload register for several reloads in one insn:

   When an insn has reloads, it is considered as having three parts:
   the input reloads, the insn itself after reloading, and the output reloads.
   Reloads of values used in memory addresses are often needed for only one part.

   When this is so, reload_when_needed records which part needs the reload.
   Two reloads for different parts of the insn can share the same reload
   register.

   When a reload is used for addresses in multiple parts, or when it is
   an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
   a register with any other reload.  */
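/* Illustrative only (not part of GCC): a minimal sketch of how a caller
   such as reload1.c is expected to drive the interface described above.
   The helper choose_hard_reg_for_reload and the surrounding loop are
   hypothetical; find_reloads, rld, n_reloads and subst_reloads are the
   real entry points declared in reload.h.  */
#if 0
static void
reload_insn_sketch (rtx_insn *insn, int ind_levels, short *spilled_regs)
{
  /* Record the reloads this insn needs and where their values appear.  */
  find_reloads (insn, 1, ind_levels, 1, spilled_regs);

  /* Pick a hard register for every reload that did not get one.  */
  for (int r = 0; r < n_reloads; r++)
    if (rld[r].reg_rtx == NULL_RTX)
      rld[r].reg_rtx = choose_hard_reg_for_reload (r);  /* hypothetical */

  /* ... emit the load insns before INSN and the store insns after it ... */

  /* Substitute the chosen reload registers into the recorded locations.  */
  subst_reloads (insn);
}
#endif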
#define REG_OK_STRICT

/* We do not enable this with ENABLE_CHECKING, since it is awfully slow.  */
#undef DEBUG_RELOAD

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "dominance.h"
#include "cfg.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "reload.h"
#include "regs.h"
#include "addresses.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "params.h"
#include "target.h"
#include "ira.h"
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))
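/* For example, a CONST_DOUBLE floating-point constant in DFmode normally
   satisfies CONST_POOL_OK_P, while a (high ...) expression never does, and
   a target can veto other constants (e.g. TLS symbol references) through
   targetm.cannot_force_const_mem.  */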
/* True if RCLASS is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static inline bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size [(int) rclass] == 1
	  || (reg_class_size [(int) rclass] >= 1
	      && targetm.class_likely_spilled_p (rclass)));
}
/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
int n_reloads;
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
int n_earlyclobbers;
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;
/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */

/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in */
  int what;			/* which reload this is for */
  machine_mode mode;		/* mode it must have */
};

static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;
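/* Illustrative only: conceptually, subst_reloads later walks this table and
   performs, for each recorded entry R, roughly

     *R.where = the reload register chosen for reload R.what, in mode R.mode;

   the real code additionally copes with shared locations and mode
   adjustments.  */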
/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};
#ifdef SECONDARY_MEMORY_NEEDED

/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;
#endif
/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx_insn *this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from that for the input operand.  */
static int output_reloadnum;
/* Compare two RTX's.  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)				\
			? REG_P (y) && REGNO (x) == REGNO (y)	\
			: rtx_equal_p (x, y) && ! side_effects_p (x))))
/* Indicates if two reload purposes are for similar enough things that we
   can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
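/* For example, two RELOAD_FOR_INPUT reloads for different operands satisfy
   MERGABLE_RELOADS and, per MERGE_TO_OTHER, stay RELOAD_FOR_INPUT when
   merged; merging anything with a RELOAD_OTHER reload is always allowed but
   the result is classified RELOAD_OTHER.  */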
/* If we are going to reload an address, compute the reload type to
   use.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
static int push_secondary_reload (int, rtx, int, int, enum reg_class,
				  machine_mode, enum reload_type,
				  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (machine_mode, machine_mode,
					int, unsigned int);
static void push_replacement (rtx *, int, machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
				 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
			      machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
				rtx_insn *, int *);
static rtx make_memloc (rtx, int);
static int maybe_memory_address_addr_space_p (machine_mode, rtx,
					      addr_space_t, rtx *);
static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
				 int, enum reload_type, int, rtx_insn *);
static rtx subst_reg_equivs (rtx, rtx_insn *);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx_insn *, int, int);
static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
				   enum rtx_code, enum rtx_code, rtx *,
				   int, enum reload_type, int, rtx_insn *);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
				       machine_mode, int,
				       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
					int, rtx_insn *, int *);
static void copy_replacements_1 (rtx *, rtx *, int);
static int find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
					 rtx, rtx *);
/* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list (regno)
    = alloc_EXPR_LIST (REG_EQUIV, mem,
		       reg_equiv_alt_mem_list (regno));
}
318 /* Determine if any secondary reloads are needed for loading (if IN_P is
319 nonzero) or storing (if IN_P is zero) X to or from a reload register of
320 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
321 are needed, push them.
323 Return the reload number of the secondary reload we made, or -1 if
324 we didn't need one. *PICODE is set to the insn_code to use if we do
325 need a secondary reload. */
327 static int
328 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
329 enum reg_class reload_class,
330 machine_mode reload_mode, enum reload_type type,
331 enum insn_code *picode, secondary_reload_info *prev_sri)
333 enum reg_class rclass = NO_REGS;
334 enum reg_class scratch_class;
335 machine_mode mode = reload_mode;
336 enum insn_code icode = CODE_FOR_nothing;
337 enum insn_code t_icode = CODE_FOR_nothing;
338 enum reload_type secondary_type;
339 int s_reload, t_reload = -1;
340 const char *scratch_constraint;
341 secondary_reload_info sri;
343 if (type == RELOAD_FOR_INPUT_ADDRESS
344 || type == RELOAD_FOR_OUTPUT_ADDRESS
345 || type == RELOAD_FOR_INPADDR_ADDRESS
346 || type == RELOAD_FOR_OUTADDR_ADDRESS)
347 secondary_type = type;
348 else
349 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
351 *picode = CODE_FOR_nothing;
353 /* If X is a paradoxical SUBREG, use the inner value to determine both the
354 mode and object being reloaded. */
355 if (paradoxical_subreg_p (x))
357 x = SUBREG_REG (x);
358 reload_mode = GET_MODE (x);
361 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
362 is still a pseudo-register by now, it *must* have an equivalent MEM
363 but we don't want to assume that), use that equivalent when seeing if
364 a secondary reload is needed since whether or not a reload is needed
365 might be sensitive to the form of the MEM. */
367 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
368 && reg_equiv_mem (REGNO (x)))
369 x = reg_equiv_mem (REGNO (x));
371 sri.icode = CODE_FOR_nothing;
372 sri.prev_sri = prev_sri;
373 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
374 reload_mode, &sri);
375 icode = (enum insn_code) sri.icode;
377 /* If we don't need any secondary registers, done. */
378 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
379 return -1;
381 if (rclass != NO_REGS)
382 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
383 reload_mode, type, &t_icode, &sri);
385 /* If we will be using an insn, the secondary reload is for a
386 scratch register. */
388 if (icode != CODE_FOR_nothing)
390 /* If IN_P is nonzero, the reload register will be the output in
391 operand 0. If IN_P is zero, the reload register will be the input
392 in operand 1. Outputs should have an initial "=", which we must
393 skip. */
395 /* ??? It would be useful to be able to handle only two, or more than
396 three, operands, but for now we can only handle the case of having
397 exactly three: output, input and one temp/scratch. */
398 gcc_assert (insn_data[(int) icode].n_operands == 3);
400 /* ??? We currently have no way to represent a reload that needs
401 an icode to reload from an intermediate tertiary reload register.
402 We should probably have a new field in struct reload to tag a
403 chain of scratch operand reloads onto. */
404 gcc_assert (rclass == NO_REGS);
406 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
407 gcc_assert (*scratch_constraint == '=');
408 scratch_constraint++;
409 if (*scratch_constraint == '&')
410 scratch_constraint++;
411 scratch_class = (reg_class_for_constraint
412 (lookup_constraint (scratch_constraint)));
414 rclass = scratch_class;
415 mode = insn_data[(int) icode].operand[2].mode;
  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     cannot use secondary reloads; you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */
432 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
433 || t_icode != CODE_FOR_nothing);
435 /* See if we can reuse an existing secondary reload. */
436 for (s_reload = 0; s_reload < n_reloads; s_reload++)
437 if (rld[s_reload].secondary_p
438 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
439 || reg_class_subset_p (rld[s_reload].rclass, rclass))
440 && ((in_p && rld[s_reload].inmode == mode)
441 || (! in_p && rld[s_reload].outmode == mode))
442 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
443 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
444 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
445 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
446 && (small_register_class_p (rclass)
447 || targetm.small_register_classes_for_mode_p (VOIDmode))
448 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
449 opnum, rld[s_reload].opnum))
451 if (in_p)
452 rld[s_reload].inmode = mode;
453 if (! in_p)
454 rld[s_reload].outmode = mode;
456 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
457 rld[s_reload].rclass = rclass;
459 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
460 rld[s_reload].optional &= optional;
461 rld[s_reload].secondary_p = 1;
462 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
463 opnum, rld[s_reload].opnum))
464 rld[s_reload].when_needed = RELOAD_OTHER;
466 break;
469 if (s_reload == n_reloads)
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* If we need a memory location to copy between the two reload regs,
473 set it up now. Note that we do the input case before making
474 the reload and the output case after. This is due to the
475 way reloads are output. */
477 if (in_p && icode == CODE_FOR_nothing
478 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
480 get_secondary_mem (x, reload_mode, opnum, type);
482 /* We may have just added new reloads. Make sure we add
483 the new reload at the end. */
484 s_reload = n_reloads;
486 #endif
488 /* We need to make a new secondary reload for this register class. */
489 rld[s_reload].in = rld[s_reload].out = 0;
490 rld[s_reload].rclass = rclass;
492 rld[s_reload].inmode = in_p ? mode : VOIDmode;
493 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
494 rld[s_reload].reg_rtx = 0;
495 rld[s_reload].optional = optional;
496 rld[s_reload].inc = 0;
497 /* Maybe we could combine these, but it seems too tricky. */
498 rld[s_reload].nocombine = 1;
499 rld[s_reload].in_reg = 0;
500 rld[s_reload].out_reg = 0;
501 rld[s_reload].opnum = opnum;
502 rld[s_reload].when_needed = secondary_type;
503 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
504 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
505 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
506 rld[s_reload].secondary_out_icode
507 = ! in_p ? t_icode : CODE_FOR_nothing;
508 rld[s_reload].secondary_p = 1;
510 n_reloads++;
512 #ifdef SECONDARY_MEMORY_NEEDED
513 if (! in_p && icode == CODE_FOR_nothing
514 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
515 get_secondary_mem (x, mode, opnum, type);
516 #endif
519 *picode = icode;
520 return s_reload;
/* If a secondary reload is needed, return its class.  If both an intermediate
   register and a scratch register are needed, we return the class of the
   intermediate register.  */
526 reg_class_t
527 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
528 rtx x)
530 enum insn_code icode;
531 secondary_reload_info sri;
533 sri.icode = CODE_FOR_nothing;
534 sri.prev_sri = NULL;
535 rclass
536 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
537 icode = (enum insn_code) sri.icode;
539 /* If there are no secondary reloads at all, we return NO_REGS.
540 If an intermediate register is needed, we return its class. */
541 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
542 return rclass;
544 /* No intermediate register is needed, but we have a special reload
545 pattern, which we assume for now needs a scratch register. */
546 return scratch_reload_class (icode);
549 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
550 three operands, verify that operand 2 is an output operand, and return
551 its register class.
552 ??? We'd like to be able to handle any pattern with at least 2 operands,
553 for zero or more scratch registers, but that needs more infrastructure. */
554 enum reg_class
555 scratch_reload_class (enum insn_code icode)
557 const char *scratch_constraint;
558 enum reg_class rclass;
560 gcc_assert (insn_data[(int) icode].n_operands == 3);
561 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
562 gcc_assert (*scratch_constraint == '=');
563 scratch_constraint++;
564 if (*scratch_constraint == '&')
565 scratch_constraint++;
566 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
567 gcc_assert (rclass != NO_REGS);
568 return rclass;
571 #ifdef SECONDARY_MEMORY_NEEDED
/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short loads and stores from all registers
     (e.g., FP registers).  */
589 #ifdef SECONDARY_MEMORY_NEEDED_MODE
590 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
591 #else
592 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
593 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
594 #endif
596 /* If we already have made a MEM for this operand in MODE, return it. */
597 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
598 return secondary_memlocs_elim[(int) mode][opnum];
600 /* If this is the first time we've tried to get a MEM for this mode,
601 allocate a new one. `something_changed' in reload will get set
602 by noticing that the frame size has changed. */
604 if (secondary_memlocs[(int) mode] == 0)
606 #ifdef SECONDARY_MEMORY_NEEDED_RTX
607 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
608 #else
609 secondary_memlocs[(int) mode]
610 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
611 #endif
614 /* Get a version of the address doing any eliminations needed. If that
615 didn't give us a new MEM, make a new one if it isn't valid. */
617 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
618 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
619 MEM_ADDR_SPACE (loc));
621 if (! mem_valid && loc == secondary_memlocs[(int) mode])
622 loc = copy_rtx (loc);
624 /* The only time the call below will do anything is if the stack
625 offset is too large. In that case IND_LEVELS doesn't matter, so we
626 can just pass a zero. Adjust the type to be the address of the
627 corresponding object. If the address was valid, save the eliminated
628 address. If it wasn't valid, we need to make a reload each time, so
629 don't save it. */
631 if (! mem_valid)
633 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
634 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
635 : RELOAD_OTHER);
637 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
638 opnum, type, 0, 0);
641 secondary_memlocs_elim[(int) mode][opnum] = loc;
642 if (secondary_memlocs_elim_used <= (int)mode)
643 secondary_memlocs_elim_used = (int)mode + 1;
644 return loc;
647 /* Clear any secondary memory locations we've made. */
649 void
650 clear_secondary_mem (void)
652 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
654 #endif /* SECONDARY_MEMORY_NEEDED */
657 /* Find the largest class which has at least one register valid in
658 mode INNER, and which for every such register, that register number
659 plus N is also valid in OUTER (if in range) and is cheap to move
660 into REGNO. Such a class must exist. */
662 static enum reg_class
663 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
664 machine_mode inner ATTRIBUTE_UNUSED, int n,
665 unsigned int dest_regno ATTRIBUTE_UNUSED)
667 int best_cost = -1;
668 int rclass;
669 int regno;
670 enum reg_class best_class = NO_REGS;
671 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
672 unsigned int best_size = 0;
673 int cost;
675 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
677 int bad = 0;
678 int good = 0;
679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
680 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
682 if (HARD_REGNO_MODE_OK (regno, inner))
684 good = 1;
685 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
686 && ! HARD_REGNO_MODE_OK (regno + n, outer))
687 bad = 1;
691 if (bad || !good)
692 continue;
693 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
695 if ((reg_class_size[rclass] > best_size
696 && (best_cost < 0 || best_cost >= cost))
697 || best_cost > cost)
699 best_class = (enum reg_class) rclass;
700 best_size = reg_class_size[rclass];
701 best_cost = register_move_cost (outer, (enum reg_class) rclass,
702 dest_class);
706 gcc_assert (best_size != 0);
708 return best_class;
711 /* We are trying to reload a subreg of something that is not a register.
712 Find the largest class which contains only registers valid in
713 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
714 which we would eventually like to obtain the object. */
716 static enum reg_class
717 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
718 machine_mode mode ATTRIBUTE_UNUSED,
719 enum reg_class dest_class ATTRIBUTE_UNUSED)
721 int best_cost = -1;
722 int rclass;
723 int regno;
724 enum reg_class best_class = NO_REGS;
725 unsigned int best_size = 0;
726 int cost;
728 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
730 int bad = 0;
731 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
733 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
734 && !HARD_REGNO_MODE_OK (regno, mode))
735 bad = 1;
738 if (bad)
739 continue;
741 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
743 if ((reg_class_size[rclass] > best_size
744 && (best_cost < 0 || best_cost >= cost))
745 || best_cost > cost)
747 best_class = (enum reg_class) rclass;
748 best_size = reg_class_size[rclass];
749 best_cost = register_move_cost (outer, (enum reg_class) rclass,
750 dest_class);
754 gcc_assert (best_size != 0);
756 #ifdef LIMIT_RELOAD_CLASS
757 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
758 #endif
759 return best_class;
762 /* Return the number of a previously made reload that can be combined with
763 a new one, or n_reloads if none of the existing reloads can be used.
764 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
765 push_reload, they determine the kind of the new reload that we try to
766 combine. P_IN points to the corresponding value of IN, which can be
767 modified by this function.
768 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
770 static int
771 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
772 enum reload_type type, int opnum, int dont_share)
774 rtx in = *p_in;
775 int i;
776 /* We can't merge two reloads if the output of either one is
777 earlyclobbered. */
779 if (earlyclobber_operand_p (out))
780 return n_reloads;
782 /* We can use an existing reload if the class is right
783 and at least one of IN and OUT is a match
784 and the other is at worst neutral.
785 (A zero compared against anything is neutral.)
787 For targets with small register classes, don't use existing reloads
788 unless they are for the same thing since that can cause us to need
789 more reload registers than we otherwise would. */
791 for (i = 0; i < n_reloads; i++)
792 if ((reg_class_subset_p (rclass, rld[i].rclass)
793 || reg_class_subset_p (rld[i].rclass, rclass))
794 /* If the existing reload has a register, it must fit our class. */
795 && (rld[i].reg_rtx == 0
796 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
797 true_regnum (rld[i].reg_rtx)))
798 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
799 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
800 || (out != 0 && MATCHES (rld[i].out, out)
801 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
802 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
803 && (small_register_class_p (rclass)
804 || targetm.small_register_classes_for_mode_p (VOIDmode))
805 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
806 return i;
808 /* Reloading a plain reg for input can match a reload to postincrement
809 that reg, since the postincrement's value is the right value.
810 Likewise, it can match a preincrement reload, since we regard
811 the preincrementation as happening before any ref in this insn
812 to that register. */
813 for (i = 0; i < n_reloads; i++)
814 if ((reg_class_subset_p (rclass, rld[i].rclass)
815 || reg_class_subset_p (rld[i].rclass, rclass))
816 /* If the existing reload has a register, it must fit our
817 class. */
818 && (rld[i].reg_rtx == 0
819 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
820 true_regnum (rld[i].reg_rtx)))
821 && out == 0 && rld[i].out == 0 && rld[i].in != 0
822 && ((REG_P (in)
823 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
824 && MATCHES (XEXP (rld[i].in, 0), in))
825 || (REG_P (rld[i].in)
826 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
827 && MATCHES (XEXP (in, 0), rld[i].in)))
828 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
829 && (small_register_class_p (rclass)
830 || targetm.small_register_classes_for_mode_p (VOIDmode))
831 && MERGABLE_RELOADS (type, rld[i].when_needed,
832 opnum, rld[i].opnum))
834 /* Make sure reload_in ultimately has the increment,
835 not the plain register. */
836 if (REG_P (in))
837 *p_in = rld[i].in;
838 return i;
840 return n_reloads;
843 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
844 expression. MODE is the mode that X will be used in. OUTPUT is true if
845 the function is invoked for the output part of an enclosing reload. */
847 static bool
848 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
850 rtx inner;
852 /* Only SUBREGs are problematical. */
853 if (GET_CODE (x) != SUBREG)
854 return false;
856 inner = SUBREG_REG (x);
858 /* If INNER is a constant or PLUS, then INNER will need reloading. */
859 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
860 return true;
862 /* If INNER is not a hard register, then INNER will not need reloading. */
863 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
864 return false;
866 /* If INNER is not ok for MODE, then INNER will need reloading. */
867 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
868 return true;
870 /* If this is for an output, and the outer part is a word or smaller,
871 INNER is larger than a word and the number of registers in INNER is
872 not the same as the number of words in INNER, then INNER will need
873 reloading (with an in-out reload). */
874 return (output
875 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
876 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
877 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
878 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
881 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
882 requiring an extra reload register. The caller has already found that
883 IN contains some reference to REGNO, so check that we can produce the
884 new value in a single step. E.g. if we have
885 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
886 instruction that adds one to a register, this should succeed.
887 However, if we have something like
888 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
889 needs to be loaded into a register first, we need a separate reload
890 register.
   Such PLUS reloads are generated by find_reloads_address_part.
892 The out-of-range PLUS expressions are usually introduced in the instruction
893 patterns by register elimination and substituting pseudos without a home
894 by their function-invariant equivalences. */
895 static int
896 can_reload_into (rtx in, int regno, machine_mode mode)
898 rtx dst;
899 rtx_insn *test_insn;
900 int r = 0;
901 struct recog_data_d save_recog_data;
903 /* For matching constraints, we often get notional input reloads where
904 we want to use the original register as the reload register. I.e.
905 technically this is a non-optional input-output reload, but IN is
906 already a valid register, and has been chosen as the reload register.
907 Speed this up, since it trivially works. */
908 if (REG_P (in))
909 return 1;
911 /* To test MEMs properly, we'd have to take into account all the reloads
912 that are already scheduled, which can become quite complicated.
913 And since we've already handled address reloads for this MEM, it
914 should always succeed anyway. */
915 if (MEM_P (in))
916 return 1;
918 /* If we can make a simple SET insn that does the job, everything should
919 be fine. */
920 dst = gen_rtx_REG (mode, regno);
921 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
922 save_recog_data = recog_data;
923 if (recog_memoized (test_insn) >= 0)
925 extract_insn (test_insn);
926 r = constrain_operands (1, get_enabled_alternatives (test_insn));
928 recog_data = save_recog_data;
929 return r;
/* Record one reload that needs to be performed.
   IN is an rtx saying where the data are to be found before this instruction.
   OUT says where they must be stored after the instruction.
   (IN is zero for data not read, and OUT is zero for data not written.)
   INLOC and OUTLOC point to the places in the instructions where
   IN and OUT were found.
   If IN and OUT are both nonzero, it means the same register must be used
   to reload both IN and OUT.

   RCLASS is a register class required for the reloaded data.
   INMODE is the machine mode that the instruction requires
   for the reg that replaces IN and OUTMODE is likewise for OUT.

   If IN is zero, then OUT's location and mode should be passed as
   INLOC and INMODE.

   STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.

   OPTIONAL nonzero means this reload does not need to be performed:
   it can be discarded if that is more convenient.

   OPNUM and TYPE say what the purpose of this reload is.

   The return value is the reload-number for this reload.

   If both IN and OUT are nonzero, in some rare cases we might
   want to make two separate reloads.  (Actually we never do this now.)
   Therefore, the reload-number for OUT is stored in
   output_reloadnum when we return; the return value applies to IN.
   Usually (presently always), when IN and OUT are nonzero,
   the two reload-numbers are equal, but the caller should be careful to
   distinguish them.  */

int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, machine_mode inmode,
	     machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
972 int dont_share = 0;
973 int dont_remove_subreg = 0;
974 #ifdef LIMIT_RELOAD_CLASS
975 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
976 #endif
977 int secondary_in_reload = -1, secondary_out_reload = -1;
978 enum insn_code secondary_in_icode = CODE_FOR_nothing;
979 enum insn_code secondary_out_icode = CODE_FOR_nothing;
980 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
981 subreg_in_class = NO_REGS;
983 /* INMODE and/or OUTMODE could be VOIDmode if no mode
984 has been specified for the operand. In that case,
985 use the operand's mode as the mode to reload. */
986 if (inmode == VOIDmode && in != 0)
987 inmode = GET_MODE (in);
988 if (outmode == VOIDmode && out != 0)
989 outmode = GET_MODE (out);
  /* If find_reloads and friends have failed to replace a pseudo with
     its reg_equiv_constant by this point, something went wrong
     beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
998 if (in != 0 && REG_P (in))
1000 int regno = REGNO (in);
1002 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1003 || reg_renumber[regno] >= 0
1004 || reg_equiv_constant (regno) == NULL_RTX);
1007 /* reg_equiv_constant only contains constants which are obviously
1008 not appropriate as destination. So if we would need to replace
1009 the destination pseudo with a constant we are in real
1010 trouble. */
1011 if (out != 0 && REG_P (out))
1013 int regno = REGNO (out);
1015 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1016 || reg_renumber[regno] >= 0
1017 || reg_equiv_constant (regno) == NULL_RTX);
1020 /* If we have a read-write operand with an address side-effect,
1021 change either IN or OUT so the side-effect happens only once. */
1022 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1023 switch (GET_CODE (XEXP (in, 0)))
1025 case POST_INC: case POST_DEC: case POST_MODIFY:
1026 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1027 break;
1029 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1030 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1031 break;
1033 default:
1034 break;
1037 /* If we are reloading a (SUBREG constant ...), really reload just the
1038 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1039 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1040 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1041 register is a pseudo, also reload the inside expression.
1042 For machines that extend byte loads, do this for any SUBREG of a pseudo
1043 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1044 M2 is an integral mode that gets extended when loaded.
1045 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1046 where either M1 is not valid for R or M2 is wider than a word but we
1047 only need one register to store an M2-sized quantity in R.
1048 (However, if OUT is nonzero, we need to reload the reg *and*
1049 the subreg, so do nothing here, and let following statement handle it.)
1051 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1052 we can't handle it here because CONST_INT does not indicate a mode.
1054 Similarly, we must reload the inside expression if we have a
1055 STRICT_LOW_PART (presumably, in == out in this case).
1057 Also reload the inner expression if it does not require a secondary
1058 reload but the SUBREG does.
1060 Finally, reload the inner expression if it is a register that is in
1061 the class whose registers cannot be referenced in a different size
1062 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1063 cannot reload just the inside since we might end up with the wrong
1064 register class. But if it is inside a STRICT_LOW_PART, we have
1065 no choice, so we hope we do get the right register class there. */
1067 if (in != 0 && GET_CODE (in) == SUBREG
1068 && (subreg_lowpart_p (in) || strict_low)
1069 #ifdef CANNOT_CHANGE_MODE_CLASS
1070 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1071 #endif
1072 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1073 && (CONSTANT_P (SUBREG_REG (in))
1074 || GET_CODE (SUBREG_REG (in)) == PLUS
1075 || strict_low
1076 || (((REG_P (SUBREG_REG (in))
1077 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1078 || MEM_P (SUBREG_REG (in)))
1079 && ((GET_MODE_PRECISION (inmode)
1080 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1081 #ifdef LOAD_EXTEND_OP
1082 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1083 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1084 <= UNITS_PER_WORD)
1085 && (GET_MODE_PRECISION (inmode)
1086 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1087 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1088 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1089 #endif
1090 #ifdef WORD_REGISTER_OPERATIONS
1091 || ((GET_MODE_PRECISION (inmode)
1092 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1093 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1094 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1095 / UNITS_PER_WORD)))
1096 #endif
1098 || (REG_P (SUBREG_REG (in))
1099 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1100 /* The case where out is nonzero
1101 is handled differently in the following statement. */
1102 && (out == 0 || subreg_lowpart_p (in))
1103 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1104 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1105 > UNITS_PER_WORD)
1106 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1107 / UNITS_PER_WORD)
1108 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1109 [GET_MODE (SUBREG_REG (in))]))
1110 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1111 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1112 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1113 SUBREG_REG (in))
1114 == NO_REGS))
1115 #ifdef CANNOT_CHANGE_MODE_CLASS
1116 || (REG_P (SUBREG_REG (in))
1117 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1118 && REG_CANNOT_CHANGE_MODE_P
1119 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1120 #endif
1123 #ifdef LIMIT_RELOAD_CLASS
1124 in_subreg_loc = inloc;
1125 #endif
1126 inloc = &SUBREG_REG (in);
1127 in = *inloc;
1128 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1129 if (MEM_P (in))
1130 /* This is supposed to happen only for paradoxical subregs made by
1131 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1132 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1133 #endif
1134 inmode = GET_MODE (in);
1137 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1138 where M1 is not valid for R if it was not handled by the code above.
1140 Similar issue for (SUBREG constant ...) if it was not handled by the
1141 code above. This can happen if SUBREG_BYTE != 0.
1143 However, we must reload the inner reg *as well as* the subreg in
1144 that case. */
1146 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1148 if (REG_P (SUBREG_REG (in)))
1149 subreg_in_class
1150 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1151 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1152 GET_MODE (SUBREG_REG (in)),
1153 SUBREG_BYTE (in),
1154 GET_MODE (in)),
1155 REGNO (SUBREG_REG (in)));
1156 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1157 subreg_in_class = find_valid_class_1 (inmode,
1158 GET_MODE (SUBREG_REG (in)),
1159 rclass);
1161 /* This relies on the fact that emit_reload_insns outputs the
1162 instructions for input reloads of type RELOAD_OTHER in the same
1163 order as the reloads. Thus if the outer reload is also of type
1164 RELOAD_OTHER, we are guaranteed that this inner reload will be
1165 output before the outer reload. */
1166 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1167 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1168 dont_remove_subreg = 1;
1171 /* Similarly for paradoxical and problematical SUBREGs on the output.
1172 Note that there is no reason we need worry about the previous value
1173 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1174 entitled to clobber it all (except in the case of a word mode subreg
1175 or of a STRICT_LOW_PART, in that latter case the constraint should
1176 label it input-output.) */
1177 if (out != 0 && GET_CODE (out) == SUBREG
1178 && (subreg_lowpart_p (out) || strict_low)
1179 #ifdef CANNOT_CHANGE_MODE_CLASS
1180 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1181 #endif
1182 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1183 && (CONSTANT_P (SUBREG_REG (out))
1184 || strict_low
1185 || (((REG_P (SUBREG_REG (out))
1186 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1187 || MEM_P (SUBREG_REG (out)))
1188 && ((GET_MODE_PRECISION (outmode)
1189 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1190 #ifdef WORD_REGISTER_OPERATIONS
1191 || ((GET_MODE_PRECISION (outmode)
1192 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1193 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1194 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1195 / UNITS_PER_WORD)))
1196 #endif
1198 || (REG_P (SUBREG_REG (out))
1199 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1200 /* The case of a word mode subreg
1201 is handled differently in the following statement. */
1202 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1203 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1204 > UNITS_PER_WORD))
1205 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1206 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1207 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1208 SUBREG_REG (out))
1209 == NO_REGS))
1210 #ifdef CANNOT_CHANGE_MODE_CLASS
1211 || (REG_P (SUBREG_REG (out))
1212 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1213 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1214 GET_MODE (SUBREG_REG (out)),
1215 outmode))
1216 #endif
1219 #ifdef LIMIT_RELOAD_CLASS
1220 out_subreg_loc = outloc;
1221 #endif
1222 outloc = &SUBREG_REG (out);
1223 out = *outloc;
1224 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1225 gcc_assert (!MEM_P (out)
1226 || GET_MODE_SIZE (GET_MODE (out))
1227 <= GET_MODE_SIZE (outmode));
1228 #endif
1229 outmode = GET_MODE (out);
1232 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1233 where either M1 is not valid for R or M2 is wider than a word but we
1234 only need one register to store an M2-sized quantity in R.
1236 However, we must reload the inner reg *as well as* the subreg in
1237 that case and the inner reg is an in-out reload. */
1239 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1241 enum reg_class in_out_class
1242 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1243 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1244 GET_MODE (SUBREG_REG (out)),
1245 SUBREG_BYTE (out),
1246 GET_MODE (out)),
1247 REGNO (SUBREG_REG (out)));
1249 /* This relies on the fact that emit_reload_insns outputs the
1250 instructions for output reloads of type RELOAD_OTHER in reverse
1251 order of the reloads. Thus if the outer reload is also of type
1252 RELOAD_OTHER, we are guaranteed that this inner reload will be
1253 output after the outer reload. */
1254 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1255 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1256 0, 0, opnum, RELOAD_OTHER);
1257 dont_remove_subreg = 1;
1260 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1261 if (in != 0 && out != 0 && MEM_P (out)
1262 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1263 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1264 dont_share = 1;
1266 /* If IN is a SUBREG of a hard register, make a new REG. This
1267 simplifies some of the cases below. */
1269 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1270 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1271 && ! dont_remove_subreg)
1272 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1274 /* Similarly for OUT. */
1275 if (out != 0 && GET_CODE (out) == SUBREG
1276 && REG_P (SUBREG_REG (out))
1277 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1278 && ! dont_remove_subreg)
1279 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1281 /* Narrow down the class of register wanted if that is
1282 desirable on this machine for efficiency. */
1284 reg_class_t preferred_class = rclass;
1286 if (in != 0)
1287 preferred_class = targetm.preferred_reload_class (in, rclass);
1289 /* Output reloads may need analogous treatment, different in detail. */
1290 if (out != 0)
1291 preferred_class
1292 = targetm.preferred_output_reload_class (out, preferred_class);
1294 /* Discard what the target said if we cannot do it. */
1295 if (preferred_class != NO_REGS
1296 || (optional && type == RELOAD_FOR_OUTPUT))
1297 rclass = (enum reg_class) preferred_class;
1300 /* Make sure we use a class that can handle the actual pseudo
1301 inside any subreg. For example, on the 386, QImode regs
1302 can appear within SImode subregs. Although GENERAL_REGS
1303 can handle SImode, QImode needs a smaller class. */
1304 #ifdef LIMIT_RELOAD_CLASS
1305 if (in_subreg_loc)
1306 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1307 else if (in != 0 && GET_CODE (in) == SUBREG)
1308 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1310 if (out_subreg_loc)
1311 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1312 if (out != 0 && GET_CODE (out) == SUBREG)
1313 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1314 #endif
1316 /* Verify that this class is at least possible for the mode that
1317 is specified. */
1318 if (this_insn_is_asm)
1320 machine_mode mode;
1321 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1322 mode = inmode;
1323 else
1324 mode = outmode;
1325 if (mode == VOIDmode)
1327 error_for_asm (this_insn, "cannot reload integer constant "
1328 "operand in %<asm%>");
1329 mode = word_mode;
1330 if (in != 0)
1331 inmode = word_mode;
1332 if (out != 0)
1333 outmode = word_mode;
1335 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1336 if (HARD_REGNO_MODE_OK (i, mode)
1337 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1338 break;
1339 if (i == FIRST_PSEUDO_REGISTER)
1341 error_for_asm (this_insn, "impossible register constraint "
1342 "in %<asm%>");
1343 /* Avoid further trouble with this insn. */
1344 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1345 /* We used to continue here setting class to ALL_REGS, but it triggers
1346 sanity check on i386 for:
1347 void foo(long double d)
1349 asm("" :: "a" (d));
1351 Returning zero here ought to be safe as we take care in
1352 find_reloads to not process the reloads when instruction was
1353 replaced by USE. */
1355 return 0;
1359 /* Optional output reloads are always OK even if we have no register class,
1360 since the function of these reloads is only to have spill_reg_store etc.
1361 set, so that the storing insn can be deleted later. */
1362 gcc_assert (rclass != NO_REGS
1363 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1365 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1367 if (i == n_reloads)
1369 /* See if we need a secondary reload register to move between CLASS
1370 and IN or CLASS and OUT. Get the icode and push any required reloads
1371 needed for each of them if so. */
1373 if (in != 0)
1374 secondary_in_reload
1375 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1376 &secondary_in_icode, NULL);
1377 if (out != 0 && GET_CODE (out) != SCRATCH)
1378 secondary_out_reload
1379 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1380 type, &secondary_out_icode, NULL);
1382 /* We found no existing reload suitable for re-use.
1383 So add an additional reload. */
1385 #ifdef SECONDARY_MEMORY_NEEDED
1386 if (subreg_in_class == NO_REGS
1387 && in != 0
1388 && (REG_P (in)
1389 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1390 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1391 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1392 /* If a memory location is needed for the copy, make one. */
1393 if (subreg_in_class != NO_REGS
1394 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1395 get_secondary_mem (in, inmode, opnum, type);
1396 #endif
1398 i = n_reloads;
1399 rld[i].in = in;
1400 rld[i].out = out;
1401 rld[i].rclass = rclass;
1402 rld[i].inmode = inmode;
1403 rld[i].outmode = outmode;
1404 rld[i].reg_rtx = 0;
1405 rld[i].optional = optional;
1406 rld[i].inc = 0;
1407 rld[i].nocombine = 0;
1408 rld[i].in_reg = inloc ? *inloc : 0;
1409 rld[i].out_reg = outloc ? *outloc : 0;
1410 rld[i].opnum = opnum;
1411 rld[i].when_needed = type;
1412 rld[i].secondary_in_reload = secondary_in_reload;
1413 rld[i].secondary_out_reload = secondary_out_reload;
1414 rld[i].secondary_in_icode = secondary_in_icode;
1415 rld[i].secondary_out_icode = secondary_out_icode;
1416 rld[i].secondary_p = 0;
1418 n_reloads++;
1420 #ifdef SECONDARY_MEMORY_NEEDED
1421 if (out != 0
1422 && (REG_P (out)
1423 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1424 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1425 && SECONDARY_MEMORY_NEEDED (rclass,
1426 REGNO_REG_CLASS (reg_or_subregno (out)),
1427 outmode))
1428 get_secondary_mem (out, outmode, opnum, type);
1429 #endif
1431 else
1433 /* We are reusing an existing reload,
1434 but we may have additional information for it.
1435 For example, we may now have both IN and OUT
1436 while the old one may have just one of them. */
1438 /* The modes can be different. If they are, we want to reload in
1439 the larger mode, so that the value is valid for both modes. */
1440 if (inmode != VOIDmode
1441 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1442 rld[i].inmode = inmode;
1443 if (outmode != VOIDmode
1444 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1445 rld[i].outmode = outmode;
1446 if (in != 0)
1448 rtx in_reg = inloc ? *inloc : 0;
1449 /* If we merge reloads for two distinct rtl expressions that
1450 are identical in content, there might be duplicate address
1451 reloads. Remove the extra set now, so that if we later find
1452 that we can inherit this reload, we can get rid of the
1453 address reloads altogether.
1455 Do not do this if both reloads are optional since the result
1456 would be an optional reload which could potentially leave
1457 unresolved address replacements.
1459 It is not sufficient to call transfer_replacements since
1460 choose_reload_regs will remove the replacements for address
1461 reloads of inherited reloads which results in the same
1462 problem. */
1463 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1464 && ! (rld[i].optional && optional))
1466 /* We must keep the address reload with the lower operand
1467 number alive. */
1468 if (opnum > rld[i].opnum)
1470 remove_address_replacements (in);
1471 in = rld[i].in;
1472 in_reg = rld[i].in_reg;
1474 else
1475 remove_address_replacements (rld[i].in);
1477 /* When emitting reloads we don't necessarily look at the in-
1478 and outmode, but also directly at the operands (in and out).
1479 So we can't simply overwrite them with whatever we have found
1480 for this (to-be-merged) reload, we have to "merge" that too.
1481 Reusing another reload already verified that we deal with the
1482 same operands, just possibly in different modes. So we
1483 overwrite the operands only when the new mode is larger.
1484 See also PR33613. */
1485 if (!rld[i].in
1486 || GET_MODE_SIZE (GET_MODE (in))
1487 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1488 rld[i].in = in;
1489 if (!rld[i].in_reg
1490 || (in_reg
1491 && GET_MODE_SIZE (GET_MODE (in_reg))
1492 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1493 rld[i].in_reg = in_reg;
1495 if (out != 0)
1497 if (!rld[i].out
1498 || (out
1499 && GET_MODE_SIZE (GET_MODE (out))
1500 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1501 rld[i].out = out;
1502 if (outloc
1503 && (!rld[i].out_reg
1504 || GET_MODE_SIZE (GET_MODE (*outloc))
1505 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1506 rld[i].out_reg = *outloc;
1508 if (reg_class_subset_p (rclass, rld[i].rclass))
1509 rld[i].rclass = rclass;
1510 rld[i].optional &= optional;
1511 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1512 opnum, rld[i].opnum))
1513 rld[i].when_needed = RELOAD_OTHER;
1514 rld[i].opnum = MIN (rld[i].opnum, opnum);
1517 /* If the ostensible rtx being reloaded differs from the rtx found
1518 in the location to substitute, this reload is not safe to combine
1519 because we cannot reliably tell whether it appears in the insn. */
1521 if (in != 0 && in != *inloc)
1522 rld[i].nocombine = 1;
1524 #if 0
1525 /* This was replaced by changes in find_reloads_address_1 and the new
1526 function inc_for_reload, which go with a new meaning of reload_inc. */
1528 /* If this is an IN/OUT reload in an insn that sets the CC,
1529 it must be for an autoincrement. It doesn't work to store
1530 the incremented value after the insn because that would clobber the CC.
1531 So we must take the value being reloaded from,
1532 increment it, store it back, then decrement it again. */
1533 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1535 out = 0;
1536 rld[i].out = 0;
1537 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1538 /* If we did not find a nonzero amount-to-increment-by,
1539 that contradicts the belief that IN is being incremented
1540 in an address in this insn. */
1541 gcc_assert (rld[i].inc != 0);
1543 #endif
1545 /* If we will replace IN and OUT with the reload-reg,
1546 record where they are located so that substitution need
1547 not do a tree walk. */
1549 if (replace_reloads)
1551 if (inloc != 0)
1553 struct replacement *r = &replacements[n_replacements++];
1554 r->what = i;
1555 r->where = inloc;
1556 r->mode = inmode;
1558 if (outloc != 0 && outloc != inloc)
1560 struct replacement *r = &replacements[n_replacements++];
1561 r->what = i;
1562 r->where = outloc;
1563 r->mode = outmode;
1567 /* If this reload is just being introduced and it has both
1568 an incoming quantity and an outgoing quantity that are
1569 supposed to be made to match, see if either one of the two
1570 can serve as the place to reload into.
1572 If one of them is acceptable, set rld[i].reg_rtx
1573 to that one. */
1575 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1577 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1578 inmode, outmode,
1579 rld[i].rclass, i,
1580 earlyclobber_operand_p (out));
1582 /* If the outgoing register already contains the same value
1583 as the incoming one, we can dispense with loading it.
1584 The easiest way to tell the caller that is to give a phony
1585 value for the incoming operand (same as outgoing one). */
1586 if (rld[i].reg_rtx == out
1587 && (REG_P (in) || CONSTANT_P (in))
1588 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1589 static_reload_reg_p, i, inmode))
1590 rld[i].in = out;
1593 /* If this is an input reload and the operand contains a register that
1594 dies in this insn and is used nowhere else, see if it is the right class
1595 to be used for this reload. Use it if so. (This occurs most commonly
1596 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1597 this if it is also an output reload that mentions the register unless
1598 the output is a SUBREG that clobbers an entire register.
1600 Note that the operand might be one of the spill regs, if it is a
1601 pseudo reg and we are in a block where spilling has not taken place.
1602 But if there is no spilling in this block, that is OK.
1603 An explicitly used hard reg cannot be a spill reg. */
1605 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1607 rtx note;
1608 int regno;
1609 machine_mode rel_mode = inmode;
1611 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1612 rel_mode = outmode;
1614 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1615 if (REG_NOTE_KIND (note) == REG_DEAD
1616 && REG_P (XEXP (note, 0))
1617 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1618 && reg_mentioned_p (XEXP (note, 0), in)
1619 /* Check that a former pseudo is valid; see find_dummy_reload. */
1620 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1621 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1622 ORIGINAL_REGNO (XEXP (note, 0)))
1623 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1624 && ! refers_to_regno_for_reload_p (regno,
1625 end_hard_regno (rel_mode,
1626 regno),
1627 PATTERN (this_insn), inloc)
1628 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1629 /* If this is also an output reload, IN cannot be used as
1630 the reload register if it is set in this insn unless IN
1631 is also OUT. */
1632 && (out == 0 || in == out
1633 || ! hard_reg_set_here_p (regno,
1634 end_hard_regno (rel_mode, regno),
1635 PATTERN (this_insn)))
1636 /* ??? Why is this code so different from the previous?
1637 Is there any simple coherent way to describe the two together?
1638 What's going on here? */
1639 && (in != out
1640 || (GET_CODE (in) == SUBREG
1641 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1642 / UNITS_PER_WORD)
1643 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1644 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1645 /* Make sure the operand fits in the reg that dies. */
1646 && (GET_MODE_SIZE (rel_mode)
1647 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1648 && HARD_REGNO_MODE_OK (regno, inmode)
1649 && HARD_REGNO_MODE_OK (regno, outmode))
1651 unsigned int offs;
1652 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1653 hard_regno_nregs[regno][outmode]);
1655 for (offs = 0; offs < nregs; offs++)
1656 if (fixed_regs[regno + offs]
1657 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1658 regno + offs))
1659 break;
1661 if (offs == nregs
1662 && (! (refers_to_regno_for_reload_p
1663 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1664 || can_reload_into (in, regno, inmode)))
1666 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1667 break;
1672 if (out)
1673 output_reloadnum = i;
1675 return i;
1678 /* Record an additional place we must replace a value
1679 for which we have already recorded a reload.
1680 RELOADNUM is the value returned by push_reload
1681 when the reload was recorded.
1682 This is used in insn patterns that use match_dup. */
1684 static void
1685 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1687 if (replace_reloads)
1689 struct replacement *r = &replacements[n_replacements++];
1690 r->what = reloadnum;
1691 r->where = loc;
1692 r->mode = mode;
1696 /* Duplicate any replacement we have recorded to apply at
1697 location ORIG_LOC to also be performed at DUP_LOC.
1698 This is used in insn patterns that use match_dup. */
1700 static void
1701 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1703 int i, n = n_replacements;
1705 for (i = 0; i < n; i++)
1707 struct replacement *r = &replacements[i];
1708 if (r->where == orig_loc)
1709 push_replacement (dup_loc, r->what, r->mode);
1713 /* Transfer all replacements that used to be in reload FROM to be in
1714 reload TO. */
1716 void
1717 transfer_replacements (int to, int from)
1719 int i;
1721 for (i = 0; i < n_replacements; i++)
1722 if (replacements[i].what == from)
1723 replacements[i].what = to;
1726 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1727 or a subpart of it. If we have any replacements registered for IN_RTX,
1728 cancel the reloads that were supposed to load them.
1729 Return nonzero if we canceled any reloads. */
1730 int
1731 remove_address_replacements (rtx in_rtx)
1733 int i, j;
1734 char reload_flags[MAX_RELOADS];
1735 int something_changed = 0;
1737 memset (reload_flags, 0, sizeof reload_flags);
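/* Descriptive note: reload_flags[R] is a two-bit mask.  Bit 0 is set when
   reload R has a replacement located inside IN_RTX (an address reload we
   may be able to cancel); bit 1 is set when it has a replacement elsewhere
   that must be kept.  Only reloads whose mask ends up exactly 1 are
   cancelled in the loop further down.  */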
1738 for (i = 0, j = 0; i < n_replacements; i++)
1740 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1741 reload_flags[replacements[i].what] |= 1;
1742 else
1744 replacements[j++] = replacements[i];
1745 reload_flags[replacements[i].what] |= 2;
1748 /* Note that the following store must be done before the recursive calls. */
1749 n_replacements = j;
1751 for (i = n_reloads - 1; i >= 0; i--)
1753 if (reload_flags[i] == 1)
1755 deallocate_reload_reg (i);
1756 remove_address_replacements (rld[i].in);
1757 rld[i].in = 0;
1758 something_changed = 1;
1761 return something_changed;
1764 /* If there is only one output reload, and it is not for an earlyclobber
1765 operand, try to combine it with a (logically unrelated) input reload
1766 to reduce the number of reload registers needed.
1768 This is safe if the input reload does not appear in
1769 the value being output-reloaded, because this implies
1770 it is not needed any more once the original insn completes.
1772 If that doesn't work, see if we can use any of the registers that
1773 die in this insn as a reload register. We can if it is of the right
1774 class and does not appear in the value being output-reloaded. */
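/* Illustrative example: in an insn such as
   (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   where pseudo 101 needs an input reload and pseudo 100 needs an output
   reload, one reload register can serve both: it is loaded from 101's
   stack slot before the insn and stored to 100's slot after it, since the
   input value is not needed once the insn has executed.  */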
1776 static void
1777 combine_reloads (void)
1779 int i, regno;
1780 int output_reload = -1;
1781 int secondary_out = -1;
1782 rtx note;
1784 /* Find the output reload; return unless there is exactly one
1785 and that one is mandatory. */
1787 for (i = 0; i < n_reloads; i++)
1788 if (rld[i].out != 0)
1790 if (output_reload >= 0)
1791 return;
1792 output_reload = i;
1795 if (output_reload < 0 || rld[output_reload].optional)
1796 return;
1798 /* An input-output reload isn't combinable. */
1800 if (rld[output_reload].in != 0)
1801 return;
1803 /* If this reload is for an earlyclobber operand, we can't do anything. */
1804 if (earlyclobber_operand_p (rld[output_reload].out))
1805 return;
1807 /* If there is a reload for part of the address of this operand, we would
1808 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1809 its life to the point where doing this combine would not lower the
1810 number of spill registers needed. */
1811 for (i = 0; i < n_reloads; i++)
1812 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1813 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1814 && rld[i].opnum == rld[output_reload].opnum)
1815 return;
1817 /* Check each input reload; can we combine it? */
1819 for (i = 0; i < n_reloads; i++)
1820 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1821 /* Life span of this reload must not extend past main insn. */
1822 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1823 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1824 && rld[i].when_needed != RELOAD_OTHER
1825 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1826 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1827 [(int) rld[output_reload].outmode])
1828 && rld[i].inc == 0
1829 && rld[i].reg_rtx == 0
1830 #ifdef SECONDARY_MEMORY_NEEDED
1831 /* Don't combine two reloads with different secondary
1832 memory locations. */
1833 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1834 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1835 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1836 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1837 #endif
1838 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1839 ? (rld[i].rclass == rld[output_reload].rclass)
1840 : (reg_class_subset_p (rld[i].rclass,
1841 rld[output_reload].rclass)
1842 || reg_class_subset_p (rld[output_reload].rclass,
1843 rld[i].rclass)))
1844 && (MATCHES (rld[i].in, rld[output_reload].out)
1845 /* Args reversed because the first arg seems to be
1846 the one that we imagine being modified
1847 while the second is the one that might be affected. */
1848 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1849 rld[i].in)
1850 /* However, if the input is a register that appears inside
1851 the output, then we also can't share.
1852 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1853 If the same reload reg is used for both reg 69 and the
1854 result to be stored in memory, then that result
1855 will clobber the address of the memory ref. */
1856 && ! (REG_P (rld[i].in)
1857 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1858 rld[output_reload].out))))
1859 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1860 rld[i].when_needed != RELOAD_FOR_INPUT)
1861 && (reg_class_size[(int) rld[i].rclass]
1862 || targetm.small_register_classes_for_mode_p (VOIDmode))
1863 /* We will allow making things slightly worse by combining an
1864 input and an output, but no worse than that. */
1865 && (rld[i].when_needed == RELOAD_FOR_INPUT
1866 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1868 int j;
1870 /* We have found a reload to combine with! */
1871 rld[i].out = rld[output_reload].out;
1872 rld[i].out_reg = rld[output_reload].out_reg;
1873 rld[i].outmode = rld[output_reload].outmode;
1874 /* Mark the old output reload as inoperative. */
1875 rld[output_reload].out = 0;
1876 /* The combined reload is needed for the entire insn. */
1877 rld[i].when_needed = RELOAD_OTHER;
1878 /* If the output reload had a secondary reload, copy it. */
1879 if (rld[output_reload].secondary_out_reload != -1)
1881 rld[i].secondary_out_reload
1882 = rld[output_reload].secondary_out_reload;
1883 rld[i].secondary_out_icode
1884 = rld[output_reload].secondary_out_icode;
1887 #ifdef SECONDARY_MEMORY_NEEDED
1888 /* Copy any secondary MEM. */
1889 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1890 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1891 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1892 #endif
1893 /* If required, minimize the register class. */
1894 if (reg_class_subset_p (rld[output_reload].rclass,
1895 rld[i].rclass))
1896 rld[i].rclass = rld[output_reload].rclass;
1898 /* Transfer all replacements from the old reload to the combined. */
1899 for (j = 0; j < n_replacements; j++)
1900 if (replacements[j].what == output_reload)
1901 replacements[j].what = i;
1903 return;
1906 /* If this insn has only one operand that is modified or written (assumed
1907 to be the first), it must be the one corresponding to this reload. It
1908 is safe to use anything that dies in this insn for that output provided
1909 that it does not occur in the output (we already know it isn't an
1910 earlyclobber).  If this is an asm insn, give up. */
1912 if (INSN_CODE (this_insn) == -1)
1913 return;
1915 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1916 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1917 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1918 return;
1920 /* See if some hard register that dies in this insn and is not used in
1921 the output is the right class. Only works if the register we pick
1922 up can fully hold our output reload. */
1923 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1924 if (REG_NOTE_KIND (note) == REG_DEAD
1925 && REG_P (XEXP (note, 0))
1926 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1927 rld[output_reload].out)
1928 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1929 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1930 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1931 regno)
1932 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1933 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1934 /* Ensure that a secondary or tertiary reload for this output
1935 won't want this register. */
1936 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1937 || (!(TEST_HARD_REG_BIT
1938 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1939 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1940 || !(TEST_HARD_REG_BIT
1941 (reg_class_contents[(int) rld[secondary_out].rclass],
1942 regno)))))
1943 && !fixed_regs[regno]
1944 /* Check that a former pseudo is valid; see find_dummy_reload. */
1945 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1946 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1947 ORIGINAL_REGNO (XEXP (note, 0)))
1948 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1950 rld[output_reload].reg_rtx
1951 = gen_rtx_REG (rld[output_reload].outmode, regno);
1952 return;
1956 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1957 See if one of IN and OUT is a register that may be used;
1958 this is desirable since a spill-register won't be needed.
1959 If so, return the register rtx that proves acceptable.
1961 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1962 RCLASS is the register class required for the reload.
1964 If FOR_REAL is >= 0, it is the number of the reload,
1965 and in some cases when it can be discovered that OUT doesn't need
1966 to be computed, clear out rld[FOR_REAL].out.
1968 If FOR_REAL is -1, this should not be done, because this call
1969 is just to see if a register can be found, not to find and install it.
1971 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1972 puts an additional constraint on being able to use IN for OUT since
1973 IN must not appear elsewhere in the insn (it is assumed that IN itself
1974 is safe from the earlyclobber). */
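/* Illustrative example: if IN is a hard register of the required class that
   dies in this insn and is not otherwise set or referenced in it, that
   register can hold both the incoming value and the result, so no separate
   spill register is needed and IN itself is returned.  */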
1976 static rtx
1977 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1978 machine_mode inmode, machine_mode outmode,
1979 reg_class_t rclass, int for_real, int earlyclobber)
1981 rtx in = real_in;
1982 rtx out = real_out;
1983 int in_offset = 0;
1984 int out_offset = 0;
1985 rtx value = 0;
1987 /* If operands exceed a word, we can't use either of them
1988 unless they have the same size. */
1989 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1990 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1991 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1992 return 0;
1994 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1995 respectively refers to a hard register. */
1997 /* Find the inside of any subregs. */
1998 while (GET_CODE (out) == SUBREG)
2000 if (REG_P (SUBREG_REG (out))
2001 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
2002 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
2003 GET_MODE (SUBREG_REG (out)),
2004 SUBREG_BYTE (out),
2005 GET_MODE (out));
2006 out = SUBREG_REG (out);
2008 while (GET_CODE (in) == SUBREG)
2010 if (REG_P (SUBREG_REG (in))
2011 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2012 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2013 GET_MODE (SUBREG_REG (in)),
2014 SUBREG_BYTE (in),
2015 GET_MODE (in));
2016 in = SUBREG_REG (in);
2019 /* Narrow down the reg class, the same way push_reload will;
2020 otherwise we might find a dummy now, but push_reload won't. */
2022 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2023 if (preferred_class != NO_REGS)
2024 rclass = (enum reg_class) preferred_class;
2027 /* See if OUT will do. */
2028 if (REG_P (out)
2029 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2031 unsigned int regno = REGNO (out) + out_offset;
2032 unsigned int nwords = hard_regno_nregs[regno][outmode];
2033 rtx saved_rtx;
2035 /* When we consider whether the insn uses OUT,
2036 ignore references within IN. They don't prevent us
2037 from copying IN into OUT, because those refs would
2038 move into the insn that reloads IN.
2040 However, we only ignore IN in its role as this reload.
2041 If the insn uses IN elsewhere and it contains OUT,
2042 that counts. We can't be sure it's the "same" operand
2043 so it might not go through this reload.
2045 We also need to avoid using OUT if it, or part of it, is a
2046 fixed register. Modifying such registers, even transiently,
2047 may have undefined effects on the machine, such as modifying
2048 the stack pointer. */
2049 saved_rtx = *inloc;
2050 *inloc = const0_rtx;
2052 if (regno < FIRST_PSEUDO_REGISTER
2053 && HARD_REGNO_MODE_OK (regno, outmode)
2054 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2055 PATTERN (this_insn), outloc))
2057 unsigned int i;
2059 for (i = 0; i < nwords; i++)
2060 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2061 regno + i)
2062 || fixed_regs[regno + i])
2063 break;
2065 if (i == nwords)
2067 if (REG_P (real_out))
2068 value = real_out;
2069 else
2070 value = gen_rtx_REG (outmode, regno);
2074 *inloc = saved_rtx;
2077 /* Consider using IN if OUT was not acceptable
2078 or if OUT dies in this insn (like the quotient in a divmod insn).
2079 We can't use IN unless it dies in this insn,
2080 which means we must know accurately which hard regs are live.
2081 Also, the result can't go in IN if IN is used within OUT,
2082 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2083 if (hard_regs_live_known
2084 && REG_P (in)
2085 && REGNO (in) < FIRST_PSEUDO_REGISTER
2086 && (value == 0
2087 || find_reg_note (this_insn, REG_UNUSED, real_out))
2088 && find_reg_note (this_insn, REG_DEAD, real_in)
2089 && !fixed_regs[REGNO (in)]
2090 && HARD_REGNO_MODE_OK (REGNO (in),
2091 /* The only case where out and real_out might
2092 have different modes is where real_out
2093 is a subreg, and in that case, out
2094 has a real mode. */
2095 (GET_MODE (out) != VOIDmode
2096 ? GET_MODE (out) : outmode))
2097 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2098 /* However only do this if we can be sure that this input
2099 operand doesn't correspond with an uninitialized pseudo.
2100 global can assign some hardreg to it that is the same as
2101 the one assigned to a different, also live pseudo (as it
2102 can ignore the conflict). We must never introduce writes
2103 to such hardregs, as they would clobber the other live
2104 pseudo. See PR 20973. */
2105 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2106 ORIGINAL_REGNO (in))
2107 /* Similarly, only do this if we can be sure that the death
2108 note is still valid. global can assign some hardreg to
2109 the pseudo referenced in the note and simultaneously a
2110 subword of this hardreg to a different, also live pseudo,
2111 because only another subword of the hardreg is actually
2112 used in the insn. This cannot happen if the pseudo has
2113 been assigned exactly one hardreg. See PR 33732. */
2114 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2116 unsigned int regno = REGNO (in) + in_offset;
2117 unsigned int nwords = hard_regno_nregs[regno][inmode];
2119 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2120 && ! hard_reg_set_here_p (regno, regno + nwords,
2121 PATTERN (this_insn))
2122 && (! earlyclobber
2123 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2124 PATTERN (this_insn), inloc)))
2126 unsigned int i;
2128 for (i = 0; i < nwords; i++)
2129 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2130 regno + i))
2131 break;
2133 if (i == nwords)
2135 /* If we were going to use OUT as the reload reg
2136 and changed our mind, it means OUT is a dummy that
2137 dies here. So don't bother copying value to it. */
2138 if (for_real >= 0 && value == real_out)
2139 rld[for_real].out = 0;
2140 if (REG_P (real_in))
2141 value = real_in;
2142 else
2143 value = gen_rtx_REG (inmode, regno);
2148 return value;
2151 /* This page contains subroutines used mainly for determining
2152 whether the IN or an OUT of a reload can serve as the
2153 reload register. */
2155 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2157 int
2158 earlyclobber_operand_p (rtx x)
2160 int i;
2162 for (i = 0; i < n_earlyclobbers; i++)
2163 if (reload_earlyclobbers[i] == x)
2164 return 1;
2166 return 0;
2169 /* Return 1 if expression X alters a hard reg in the range
2170 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2171 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2172 X should be the body of an instruction. */
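/* Illustrative example: with BEG_REGNO == 3 and END_REGNO == 5, a body of
   (set (reg:DI 4) ...) counts as setting a register in the range, whereas
   (use (reg:SI 3)) does not, since only SET and CLOBBER destinations
   (possibly inside a PARALLEL) are examined.  */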
2174 static int
2175 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2177 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2179 rtx op0 = SET_DEST (x);
2181 while (GET_CODE (op0) == SUBREG)
2182 op0 = SUBREG_REG (op0);
2183 if (REG_P (op0))
2185 unsigned int r = REGNO (op0);
2187 /* See if this reg overlaps range under consideration. */
2188 if (r < end_regno
2189 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2190 return 1;
2193 else if (GET_CODE (x) == PARALLEL)
2195 int i = XVECLEN (x, 0) - 1;
2197 for (; i >= 0; i--)
2198 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2199 return 1;
2202 return 0;
2205 /* Return 1 if ADDR is a valid memory address for mode MODE
2206 in address space AS, and check that each pseudo reg has the
2207 proper kind of hard reg. */
2209 int
2210 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2211 rtx addr, addr_space_t as)
2213 #ifdef GO_IF_LEGITIMATE_ADDRESS
2214 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2215 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2216 return 0;
2218 win:
2219 return 1;
2220 #else
2221 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2222 #endif
2225 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2226 if they are the same hard reg, and has special hacks for
2227 autoincrement and autodecrement.
2228 This is specifically intended for find_reloads to use
2229 in determining whether two operands match.
2230 X is the operand whose number is the lower of the two.
2232 The value is 2 if Y contains a pre-increment that matches
2233 a non-incrementing address in X. */
2235 /* ??? To be completely correct, we should arrange to pass
2236 for X the output operand and for Y the input operand.
2237 For now, we assume that the output operand has the lower number
2238 because that is natural in (SET output (... input ...)). */
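/* Illustrative example: (reg:SI 3) matches (subreg:SI (reg:DI 3) 0) when
   the subreg resolves to hard register 3, and an X of (mem:SI (reg:SI 5))
   matched against a Y of (mem:SI (pre_inc (reg:SI 5))) returns 2, telling
   the caller that the match relies on the pre-increment.  */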
2240 int
2241 operands_match_p (rtx x, rtx y)
2243 int i;
2244 RTX_CODE code = GET_CODE (x);
2245 const char *fmt;
2246 int success_2;
2248 if (x == y)
2249 return 1;
2250 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2251 && (REG_P (y) || (GET_CODE (y) == SUBREG
2252 && REG_P (SUBREG_REG (y)))))
2254 int j;
2256 if (code == SUBREG)
2258 i = REGNO (SUBREG_REG (x));
2259 if (i >= FIRST_PSEUDO_REGISTER)
2260 goto slow;
2261 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2262 GET_MODE (SUBREG_REG (x)),
2263 SUBREG_BYTE (x),
2264 GET_MODE (x));
2266 else
2267 i = REGNO (x);
2269 if (GET_CODE (y) == SUBREG)
2271 j = REGNO (SUBREG_REG (y));
2272 if (j >= FIRST_PSEUDO_REGISTER)
2273 goto slow;
2274 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2275 GET_MODE (SUBREG_REG (y)),
2276 SUBREG_BYTE (y),
2277 GET_MODE (y));
2279 else
2280 j = REGNO (y);
2282 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2283 multiple hard register group of scalar integer registers, so that
2284 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2285 register. */
2286 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2287 && SCALAR_INT_MODE_P (GET_MODE (x))
2288 && i < FIRST_PSEUDO_REGISTER)
2289 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2290 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2291 && SCALAR_INT_MODE_P (GET_MODE (y))
2292 && j < FIRST_PSEUDO_REGISTER)
2293 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2295 return i == j;
2297 /* If two operands must match, because they are really a single
2298 operand of an assembler insn, then two postincrements are invalid
2299 because the assembler insn would increment only once.
2300 On the other hand, a postincrement matches ordinary indexing
2301 if the postincrement is the output operand. */
2302 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2303 return operands_match_p (XEXP (x, 0), y);
2304 /* Two preincrements are invalid
2305 because the assembler insn would increment only once.
2306 On the other hand, a preincrement matches ordinary indexing
2307 if the preincrement is the input operand.
2308 In this case, return 2, since some callers need to do special
2309 things when this happens. */
2310 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2311 || GET_CODE (y) == PRE_MODIFY)
2312 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2314 slow:
2316 /* Now we have disposed of all the cases in which different rtx codes
2317 can match. */
2318 if (code != GET_CODE (y))
2319 return 0;
2321 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2322 if (GET_MODE (x) != GET_MODE (y))
2323 return 0;
2325 /* MEMs referring to different address space are not equivalent. */
2326 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2327 return 0;
2329 switch (code)
2331 CASE_CONST_UNIQUE:
2332 return 0;
2334 case LABEL_REF:
2335 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2336 case SYMBOL_REF:
2337 return XSTR (x, 0) == XSTR (y, 0);
2339 default:
2340 break;
2343 /* Compare the elements. If any pair of corresponding elements
2344 fail to match, return 0 for the whole thing. */
2346 success_2 = 0;
2347 fmt = GET_RTX_FORMAT (code);
2348 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2350 int val, j;
2351 switch (fmt[i])
2353 case 'w':
2354 if (XWINT (x, i) != XWINT (y, i))
2355 return 0;
2356 break;
2358 case 'i':
2359 if (XINT (x, i) != XINT (y, i))
2360 return 0;
2361 break;
2363 case 'e':
2364 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2365 if (val == 0)
2366 return 0;
2367 /* If any subexpression returns 2,
2368 we should return 2 if we are successful. */
2369 if (val == 2)
2370 success_2 = 1;
2371 break;
2373 case '0':
2374 break;
2376 case 'E':
2377 if (XVECLEN (x, i) != XVECLEN (y, i))
2378 return 0;
2379 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2381 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2382 if (val == 0)
2383 return 0;
2384 if (val == 2)
2385 success_2 = 1;
2387 break;
2389 /* It is believed that rtx's at this level will never
2390 contain anything but integers and other rtx's,
2391 except for within LABEL_REFs and SYMBOL_REFs. */
2392 default:
2393 gcc_unreachable ();
2396 return 1 + success_2;
2399 /* Describe the range of registers or memory referenced by X.
2400 If X is a register, set REG_FLAG and put the first register
2401 number into START and the last plus one into END.
2402 If X is a memory reference, put a base address into BASE
2403 and a range of integer offsets into START and END.
2404 If X is pushing on the stack, we can assume it causes no trouble,
2405 so we set the SAFE field. */
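/* Illustrative example: (mem:SI (plus:SI (reg:SI fp) (const_int 8)))
   decomposes into base == (reg:SI fp), start == 8, end == 12, while a
   hard register operand such as (reg:DI 4) gives reg_flag == 1,
   start == 4 and end == 6 (assuming DImode occupies two hard registers
   on the target).  */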
2407 static struct decomposition
2408 decompose (rtx x)
2410 struct decomposition val;
2411 int all_const = 0;
2413 memset (&val, 0, sizeof (val));
2415 switch (GET_CODE (x))
2417 case MEM:
2419 rtx base = NULL_RTX, offset = 0;
2420 rtx addr = XEXP (x, 0);
2422 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2423 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2425 val.base = XEXP (addr, 0);
2426 val.start = -GET_MODE_SIZE (GET_MODE (x));
2427 val.end = GET_MODE_SIZE (GET_MODE (x));
2428 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2429 return val;
2432 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2434 if (GET_CODE (XEXP (addr, 1)) == PLUS
2435 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2436 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2438 val.base = XEXP (addr, 0);
2439 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2440 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2441 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2442 return val;
2446 if (GET_CODE (addr) == CONST)
2448 addr = XEXP (addr, 0);
2449 all_const = 1;
2451 if (GET_CODE (addr) == PLUS)
2453 if (CONSTANT_P (XEXP (addr, 0)))
2455 base = XEXP (addr, 1);
2456 offset = XEXP (addr, 0);
2458 else if (CONSTANT_P (XEXP (addr, 1)))
2460 base = XEXP (addr, 0);
2461 offset = XEXP (addr, 1);
2465 if (offset == 0)
2467 base = addr;
2468 offset = const0_rtx;
2470 if (GET_CODE (offset) == CONST)
2471 offset = XEXP (offset, 0);
2472 if (GET_CODE (offset) == PLUS)
2474 if (CONST_INT_P (XEXP (offset, 0)))
2476 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2477 offset = XEXP (offset, 0);
2479 else if (CONST_INT_P (XEXP (offset, 1)))
2481 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2482 offset = XEXP (offset, 1);
2484 else
2486 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2487 offset = const0_rtx;
2490 else if (!CONST_INT_P (offset))
2492 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2493 offset = const0_rtx;
2496 if (all_const && GET_CODE (base) == PLUS)
2497 base = gen_rtx_CONST (GET_MODE (base), base);
2499 gcc_assert (CONST_INT_P (offset));
2501 val.start = INTVAL (offset);
2502 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2503 val.base = base;
2505 break;
2507 case REG:
2508 val.reg_flag = 1;
2509 val.start = true_regnum (x);
2510 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2512 /* A pseudo with no hard reg. */
2513 val.start = REGNO (x);
2514 val.end = val.start + 1;
2516 else
2517 /* A hard reg. */
2518 val.end = end_hard_regno (GET_MODE (x), val.start);
2519 break;
2521 case SUBREG:
2522 if (!REG_P (SUBREG_REG (x)))
2523 /* This could be more precise, but it's good enough. */
2524 return decompose (SUBREG_REG (x));
2525 val.reg_flag = 1;
2526 val.start = true_regnum (x);
2527 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2528 return decompose (SUBREG_REG (x));
2529 else
2530 /* A hard reg. */
2531 val.end = val.start + subreg_nregs (x);
2532 break;
2534 case SCRATCH:
2535 /* This hasn't been assigned yet, so it can't conflict yet. */
2536 val.safe = 1;
2537 break;
2539 default:
2540 gcc_assert (CONSTANT_P (x));
2541 val.safe = 1;
2542 break;
2544 return val;
2547 /* Return 1 if altering Y will not modify the value of X.
2548 Y is also described by YDATA, which should be decompose (Y). */
2550 static int
2551 immune_p (rtx x, rtx y, struct decomposition ydata)
2553 struct decomposition xdata;
2555 if (ydata.reg_flag)
2556 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2557 if (ydata.safe)
2558 return 1;
2560 gcc_assert (MEM_P (y));
2561 /* If Y is memory and X is not, Y can't affect X. */
2562 if (!MEM_P (x))
2563 return 1;
2565 xdata = decompose (x);
2567 if (! rtx_equal_p (xdata.base, ydata.base))
2569 /* If bases are distinct symbolic constants, there is no overlap. */
2570 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2571 return 1;
2572 /* Constants and stack slots never overlap. */
2573 if (CONSTANT_P (xdata.base)
2574 && (ydata.base == frame_pointer_rtx
2575 || ydata.base == hard_frame_pointer_rtx
2576 || ydata.base == stack_pointer_rtx))
2577 return 1;
2578 if (CONSTANT_P (ydata.base)
2579 && (xdata.base == frame_pointer_rtx
2580 || xdata.base == hard_frame_pointer_rtx
2581 || xdata.base == stack_pointer_rtx))
2582 return 1;
2583 /* If either base is variable, we don't know anything. */
2584 return 0;
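/* At this point both X and Y are memory references with the same base,
   so Y leaves X intact exactly when their [start, end) byte ranges do
   not overlap.  */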
2587 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2590 /* Similar, but calls decompose. */
2592 int
2593 safe_from_earlyclobber (rtx op, rtx clobber)
2595 struct decomposition early_data;
2597 early_data = decompose (clobber);
2598 return immune_p (op, clobber, early_data);
2601 /* Main entry point of this file: search the body of INSN
2602 for values that need reloading and record them with push_reload.
2603 REPLACE nonzero means record also where the values occur
2604 so that subst_reloads can be used.
2606 IND_LEVELS says how many levels of indirection are supported by this
2607 machine; a value of zero means that a memory reference is not a valid
2608 memory address.
2610 LIVE_KNOWN says we have valid information about which hard
2611 regs are live at each point in the program; this is true when
2612 we are called from global_alloc but false when stupid register
2613 allocation has been done.
2615 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2616 whose element is nonnegative if that reg has been commandeered for reloading into.
2617 It is copied into STATIC_RELOAD_REG_P and referenced from there
2618 by various subroutines.
2620 Return TRUE if some operands need to be changed, because of swapping
2621 commutative operands, reg_equiv_address substitution, or whatever. */
2623 int
2624 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2625 short *reload_reg_p)
2627 int insn_code_number;
2628 int i, j;
2629 int noperands;
2630 /* These start out as the constraints for the insn
2631 and they are chewed up as we consider alternatives. */
2632 const char *constraints[MAX_RECOG_OPERANDS];
2633 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2634 a register. */
2635 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2636 char pref_or_nothing[MAX_RECOG_OPERANDS];
2637 /* Nonzero for a MEM operand whose entire address needs a reload.
2638 May be -1 to indicate the entire address may or may not need a reload. */
2639 int address_reloaded[MAX_RECOG_OPERANDS];
2640 /* Nonzero for an address operand that needs to be completely reloaded.
2641 May be -1 to indicate the entire operand may or may not need a reload. */
2642 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2643 /* Value of enum reload_type to use for operand. */
2644 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2645 /* Value of enum reload_type to use within address of operand. */
2646 enum reload_type address_type[MAX_RECOG_OPERANDS];
2647 /* Save the usage of each operand. */
2648 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2649 int no_input_reloads = 0, no_output_reloads = 0;
2650 int n_alternatives;
2651 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2652 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2653 char this_alternative_win[MAX_RECOG_OPERANDS];
2654 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2655 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2656 int this_alternative_matches[MAX_RECOG_OPERANDS];
2657 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2658 int this_alternative_number;
2659 int goal_alternative_number = 0;
2660 int operand_reloadnum[MAX_RECOG_OPERANDS];
2661 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2662 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2663 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2664 char goal_alternative_win[MAX_RECOG_OPERANDS];
2665 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2666 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2667 int goal_alternative_swapped;
2668 int best;
2669 int commutative;
2670 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2671 rtx substed_operand[MAX_RECOG_OPERANDS];
2672 rtx body = PATTERN (insn);
2673 rtx set = single_set (insn);
2674 int goal_earlyclobber = 0, this_earlyclobber;
2675 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2676 int retval = 0;
2678 this_insn = insn;
2679 n_reloads = 0;
2680 n_replacements = 0;
2681 n_earlyclobbers = 0;
2682 replace_reloads = replace;
2683 hard_regs_live_known = live_known;
2684 static_reload_reg_p = reload_reg_p;
2686 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2687 neither are insns that SET cc0. Insns that use CC0 are not allowed
2688 to have any input reloads. */
2689 if (JUMP_P (insn) || CALL_P (insn))
2690 no_output_reloads = 1;
2692 #ifdef HAVE_cc0
2693 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2694 no_input_reloads = 1;
2695 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2696 no_output_reloads = 1;
2697 #endif
2699 #ifdef SECONDARY_MEMORY_NEEDED
2700 /* The eliminated forms of any secondary memory locations are per-insn, so
2701 clear them out here. */
2703 if (secondary_memlocs_elim_used)
2705 memset (secondary_memlocs_elim, 0,
2706 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2707 secondary_memlocs_elim_used = 0;
2709 #endif
2711 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2712 is cheap to move between them. If it is not, there may not be an insn
2713 to do the copy, so we may need a reload. */
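/* (A cost of exactly 2 is the conventional default returned by
   register_move_cost for a cheap register-register copy.)  */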
2714 if (GET_CODE (body) == SET
2715 && REG_P (SET_DEST (body))
2716 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2717 && REG_P (SET_SRC (body))
2718 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2719 && register_move_cost (GET_MODE (SET_SRC (body)),
2720 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2721 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2722 return 0;
2724 extract_insn (insn);
2726 noperands = reload_n_operands = recog_data.n_operands;
2727 n_alternatives = recog_data.n_alternatives;
2729 /* Just return "no reloads" if insn has no operands with constraints. */
2730 if (noperands == 0 || n_alternatives == 0)
2731 return 0;
2733 insn_code_number = INSN_CODE (insn);
2734 this_insn_is_asm = insn_code_number < 0;
2736 memcpy (operand_mode, recog_data.operand_mode,
2737 noperands * sizeof (machine_mode));
2738 memcpy (constraints, recog_data.constraints,
2739 noperands * sizeof (const char *));
2741 commutative = -1;
2743 /* If we will need to know, later, whether some pair of operands
2744 are the same, we must compare them now and save the result.
2745 Reloading the base and index registers will clobber them
2746 and afterward they will fail to match. */
2748 for (i = 0; i < noperands; i++)
2750 const char *p;
2751 int c;
2752 char *end;
2754 substed_operand[i] = recog_data.operand[i];
2755 p = constraints[i];
2757 modified[i] = RELOAD_READ;
2759 /* Scan this operand's constraint to see if it is an output operand,
2760 an in-out operand, is commutative, or should match another. */
2762 while ((c = *p))
2764 p += CONSTRAINT_LEN (c, p);
2765 switch (c)
2767 case '=':
2768 modified[i] = RELOAD_WRITE;
2769 break;
2770 case '+':
2771 modified[i] = RELOAD_READ_WRITE;
2772 break;
2773 case '%':
2775 /* The last operand should not be marked commutative. */
2776 gcc_assert (i != noperands - 1);
2778 /* We currently only support one commutative pair of
2779 operands. Some existing asm code currently uses more
2780 than one pair. Previously, that would usually work,
2781 but sometimes it would crash the compiler. We
2782 continue supporting that case as well as we can by
2783 silently ignoring all but the first pair. In the
2784 future we may handle it correctly. */
2785 if (commutative < 0)
2786 commutative = i;
2787 else
2788 gcc_assert (this_insn_is_asm);
2790 break;
2791 /* Use of ISDIGIT is tempting here, but it may get expensive because
2792 of locale support we don't want. */
2793 case '0': case '1': case '2': case '3': case '4':
2794 case '5': case '6': case '7': case '8': case '9':
2796 c = strtoul (p - 1, &end, 10);
2797 p = end;
2799 operands_match[c][i]
2800 = operands_match_p (recog_data.operand[c],
2801 recog_data.operand[i]);
2803 /* An operand may not match itself. */
2804 gcc_assert (c != i);
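/* Illustrative example: a constraint string of "0" on operand 2 makes
   C == 0 here, and operands_match[0][2] records whether operands 0 and 2
   are already identical, so the answer is still available after their
   addresses have been reloaded.  */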
2806 /* If C can be commuted with C+1, and C might need to match I,
2807 then C+1 might also need to match I. */
2808 if (commutative >= 0)
2810 if (c == commutative || c == commutative + 1)
2812 int other = c + (c == commutative ? 1 : -1);
2813 operands_match[other][i]
2814 = operands_match_p (recog_data.operand[other],
2815 recog_data.operand[i]);
2817 if (i == commutative || i == commutative + 1)
2819 int other = i + (i == commutative ? 1 : -1);
2820 operands_match[c][other]
2821 = operands_match_p (recog_data.operand[c],
2822 recog_data.operand[other]);
2824 /* Note that C is supposed to be less than I.
2825 No need to consider altering both C and I because in
2826 that case we would alter one into the other. */
2833 /* Examine each operand that is a memory reference or memory address
2834 and reload parts of the addresses into index registers.
2835 Also here any references to pseudo regs that didn't get hard regs
2836 but are equivalent to constants get replaced in the insn itself
2837 with those constants. Nobody will ever see them again.
2839 Finally, set up the preferred classes of each operand. */
2841 for (i = 0; i < noperands; i++)
2843 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2845 address_reloaded[i] = 0;
2846 address_operand_reloaded[i] = 0;
2847 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2848 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2849 : RELOAD_OTHER);
2850 address_type[i]
2851 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2852 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2853 : RELOAD_OTHER);
2855 if (*constraints[i] == 0)
2856 /* Ignore things like match_operator operands. */
2858 else if (insn_extra_address_constraint
2859 (lookup_constraint (constraints[i])))
2861 address_operand_reloaded[i]
2862 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2863 recog_data.operand[i],
2864 recog_data.operand_loc[i],
2865 i, operand_type[i], ind_levels, insn);
2867 /* If we now have a simple operand where we used to have a
2868 PLUS or MULT, re-recognize and try again. */
2869 if ((OBJECT_P (*recog_data.operand_loc[i])
2870 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2871 && (GET_CODE (recog_data.operand[i]) == MULT
2872 || GET_CODE (recog_data.operand[i]) == PLUS))
2874 INSN_CODE (insn) = -1;
2875 retval = find_reloads (insn, replace, ind_levels, live_known,
2876 reload_reg_p);
2877 return retval;
2880 recog_data.operand[i] = *recog_data.operand_loc[i];
2881 substed_operand[i] = recog_data.operand[i];
2883 /* Address operands are reloaded in their existing mode,
2884 no matter what is specified in the machine description. */
2885 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2887 /* If the address is a single CONST_INT pick address mode
2888 instead otherwise we will later not know in which mode
2889 the reload should be performed. */
2890 if (operand_mode[i] == VOIDmode)
2891 operand_mode[i] = Pmode;
2894 else if (code == MEM)
2896 address_reloaded[i]
2897 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2898 recog_data.operand_loc[i],
2899 XEXP (recog_data.operand[i], 0),
2900 &XEXP (recog_data.operand[i], 0),
2901 i, address_type[i], ind_levels, insn);
2902 recog_data.operand[i] = *recog_data.operand_loc[i];
2903 substed_operand[i] = recog_data.operand[i];
2905 else if (code == SUBREG)
2907 rtx reg = SUBREG_REG (recog_data.operand[i]);
2908 rtx op
2909 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2910 ind_levels,
2911 set != 0
2912 && &SET_DEST (set) == recog_data.operand_loc[i],
2913 insn,
2914 &address_reloaded[i]);
2916 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2917 that didn't get a hard register, emit a USE with a REG_EQUAL
2918 note in front so that we might inherit a previous, possibly
2919 wider reload. */
2921 if (replace
2922 && MEM_P (op)
2923 && REG_P (reg)
2924 && (GET_MODE_SIZE (GET_MODE (reg))
2925 >= GET_MODE_SIZE (GET_MODE (op)))
2926 && reg_equiv_constant (REGNO (reg)) == 0)
2927 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2928 insn),
2929 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2931 substed_operand[i] = recog_data.operand[i] = op;
2933 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2934 /* We can get a PLUS as an "operand" as a result of register
2935 elimination. See eliminate_regs and gen_reload. We handle
2936 a unary operator by reloading the operand. */
2937 substed_operand[i] = recog_data.operand[i]
2938 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2939 ind_levels, 0, insn,
2940 &address_reloaded[i]);
2941 else if (code == REG)
2943 /* This is equivalent to calling find_reloads_toplev.
2944 The code is duplicated for speed.
2945 When we find a pseudo always equivalent to a constant,
2946 we replace it by the constant. We must be sure, however,
2947 that we don't try to replace it in the insn in which it
2948 is being set. */
2949 int regno = REGNO (recog_data.operand[i]);
2950 if (reg_equiv_constant (regno) != 0
2951 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2953 /* Record the existing mode so that the check whether constants are
2954 allowed will work when operand_mode isn't specified. */
2956 if (operand_mode[i] == VOIDmode)
2957 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2959 substed_operand[i] = recog_data.operand[i]
2960 = reg_equiv_constant (regno);
2962 if (reg_equiv_memory_loc (regno) != 0
2963 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2964 /* We need not give a valid is_set_dest argument since the case
2965 of a constant equivalence was checked above. */
2966 substed_operand[i] = recog_data.operand[i]
2967 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2968 ind_levels, 0, insn,
2969 &address_reloaded[i]);
2971 /* If the operand is still a register (we didn't replace it with an
2972 equivalent), get the preferred class to reload it into. */
2973 code = GET_CODE (recog_data.operand[i]);
2974 preferred_class[i]
2975 = ((code == REG && REGNO (recog_data.operand[i])
2976 >= FIRST_PSEUDO_REGISTER)
2977 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2978 : NO_REGS);
2979 pref_or_nothing[i]
2980 = (code == REG
2981 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2982 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2985 /* If this is simply a copy from operand 1 to operand 0, merge the
2986 preferred classes for the operands. */
2987 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2988 && recog_data.operand[1] == SET_SRC (set))
2990 preferred_class[0] = preferred_class[1]
2991 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2992 pref_or_nothing[0] |= pref_or_nothing[1];
2993 pref_or_nothing[1] |= pref_or_nothing[0];
2996 /* Now see what we need for pseudo-regs that didn't get hard regs
2997 or got the wrong kind of hard reg. For this, we must consider
2998 all the operands together against the register constraints. */
3000 best = MAX_RECOG_OPERANDS * 2 + 600;
3002 goal_alternative_swapped = 0;
3004 /* The constraints are made of several alternatives.
3005 Each operand's constraint looks like foo,bar,... with commas
3006 separating the alternatives. The first alternatives for all
3007 operands go together, the second alternatives go together, etc.
3009 First loop over alternatives. */
3011 alternative_mask enabled = get_enabled_alternatives (insn);
3012 for (this_alternative_number = 0;
3013 this_alternative_number < n_alternatives;
3014 this_alternative_number++)
3016 int swapped;
3018 if (!TEST_BIT (enabled, this_alternative_number))
3020 int i;
3022 for (i = 0; i < recog_data.n_operands; i++)
3023 constraints[i] = skip_alternative (constraints[i]);
3025 continue;
3028 /* If insn is commutative (it's safe to exchange a certain pair
3029 of operands) then we need to try each alternative twice, the
3030 second time matching those two operands as if we had
3031 exchanged them. To do this, really exchange them in
3032 operands. */
3033 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3035 /* Loop over operands for one constraint alternative. */
3036 /* LOSERS counts those that don't fit this alternative
3037 and would require loading. */
3038 int losers = 0;
3039 /* BAD is set to 1 if some operand can't fit this alternative
3040 even after reloading. */
3041 int bad = 0;
3042 /* REJECT is a count of how undesirable this alternative says it is
3043 if any reloading is required. If the alternative matches exactly
3044 then REJECT is ignored, but otherwise it gets this much
3045 counted against it in addition to the reloading needed. Each
3046 ? counts three times here since we want the disparaging caused by
3047 a bad register class to only count 1/3 as much. */
3048 int reject = 0;
3050 if (swapped)
3052 enum reg_class tclass;
3053 int t;
3055 recog_data.operand[commutative] = substed_operand[commutative + 1];
3056 recog_data.operand[commutative + 1] = substed_operand[commutative];
3057 /* Swap the duplicates too. */
3058 for (i = 0; i < recog_data.n_dups; i++)
3059 if (recog_data.dup_num[i] == commutative
3060 || recog_data.dup_num[i] == commutative + 1)
3061 *recog_data.dup_loc[i]
3062 = recog_data.operand[(int) recog_data.dup_num[i]];
3064 tclass = preferred_class[commutative];
3065 preferred_class[commutative] = preferred_class[commutative + 1];
3066 preferred_class[commutative + 1] = tclass;
3068 t = pref_or_nothing[commutative];
3069 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3070 pref_or_nothing[commutative + 1] = t;
3072 t = address_reloaded[commutative];
3073 address_reloaded[commutative] = address_reloaded[commutative + 1];
3074 address_reloaded[commutative + 1] = t;
3077 this_earlyclobber = 0;
3079 for (i = 0; i < noperands; i++)
3081 const char *p = constraints[i];
3082 char *end;
3083 int len;
3084 int win = 0;
3085 int did_match = 0;
3086 /* 0 => this operand can be reloaded somehow for this alternative. */
3087 int badop = 1;
3088 /* 0 => this operand can be reloaded if the alternative allows regs. */
3089 int winreg = 0;
3090 int c;
3091 int m;
3092 rtx operand = recog_data.operand[i];
3093 int offset = 0;
3094 /* Nonzero means this is a MEM that must be reloaded into a reg
3095 regardless of what the constraint says. */
3096 int force_reload = 0;
3097 int offmemok = 0;
3098 /* Nonzero if a constant forced into memory would be OK for this
3099 operand. */
3100 int constmemok = 0;
3101 int earlyclobber = 0;
3102 enum constraint_num cn;
3103 enum reg_class cl;
3105 /* If the predicate accepts a unary operator, it means that
3106 we need to reload the operand, but do not do this for
3107 match_operator and friends. */
3108 if (UNARY_P (operand) && *p != 0)
3109 operand = XEXP (operand, 0);
3111 /* If the operand is a SUBREG, extract
3112 the REG or MEM (or maybe even a constant) within.
3113 (Constants can occur as a result of reg_equiv_constant.) */
3115 while (GET_CODE (operand) == SUBREG)
3117 /* Offset only matters when operand is a REG and
3118 it is a hard reg. This is because it is passed
3119 to reg_fits_class_p if it is a REG and all pseudos
3120 return 0 from that function. */
3121 if (REG_P (SUBREG_REG (operand))
3122 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3124 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3125 GET_MODE (SUBREG_REG (operand)),
3126 SUBREG_BYTE (operand),
3127 GET_MODE (operand)) < 0)
3128 force_reload = 1;
3129 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3130 GET_MODE (SUBREG_REG (operand)),
3131 SUBREG_BYTE (operand),
3132 GET_MODE (operand));
3134 operand = SUBREG_REG (operand);
3135 /* Force reload if this is a constant or PLUS or if there may
3136 be a problem accessing OPERAND in the outer mode. */
3137 if (CONSTANT_P (operand)
3138 || GET_CODE (operand) == PLUS
3139 /* We must force a reload of paradoxical SUBREGs
3140 of a MEM because the alignment of the inner value
3141 may not be enough to do the outer reference. On
3142 big-endian machines, it may also reference outside
3143 the object.
3145 On machines that extend byte operations and we have a
3146 SUBREG where both the inner and outer modes are no wider
3147 than a word and the inner mode is narrower, is integral,
3148 and gets extended when loaded from memory, combine.c has
3149 made assumptions about the behavior of the machine in such
3150 register access. If the data is, in fact, in memory we
3151 must always load using the size assumed to be in the
3152 register and let the insn do the different-sized
3153 accesses.
3155 This is doubly true if WORD_REGISTER_OPERATIONS. In
3156 this case eliminate_regs has left non-paradoxical
3157 subregs for push_reload to see. Make sure it does
3158 by forcing the reload.
3160 ??? When is it right at this stage to have a subreg
3161 of a mem that is _not_ to be handled specially? IMO
3162 those should have been reduced to just a mem. */
3163 || ((MEM_P (operand)
3164 || (REG_P (operand)
3165 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3166 #ifndef WORD_REGISTER_OPERATIONS
3167 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3168 < BIGGEST_ALIGNMENT)
3169 && (GET_MODE_SIZE (operand_mode[i])
3170 > GET_MODE_SIZE (GET_MODE (operand))))
3171 || BYTES_BIG_ENDIAN
3172 #ifdef LOAD_EXTEND_OP
3173 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3174 && (GET_MODE_SIZE (GET_MODE (operand))
3175 <= UNITS_PER_WORD)
3176 && (GET_MODE_SIZE (operand_mode[i])
3177 > GET_MODE_SIZE (GET_MODE (operand)))
3178 && INTEGRAL_MODE_P (GET_MODE (operand))
3179 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3180 #endif
3182 #endif
3185 force_reload = 1;
3188 this_alternative[i] = NO_REGS;
3189 this_alternative_win[i] = 0;
3190 this_alternative_match_win[i] = 0;
3191 this_alternative_offmemok[i] = 0;
3192 this_alternative_earlyclobber[i] = 0;
3193 this_alternative_matches[i] = -1;
3195 /* An empty constraint or empty alternative
3196 allows anything which matched the pattern. */
3197 if (*p == 0 || *p == ',')
3198 win = 1, badop = 0;
3200 /* Scan this alternative's specs for this operand;
3201 set WIN if the operand fits any letter in this alternative.
3202 Otherwise, clear BADOP if this operand could
3203 fit some letter after reloads,
3204 or set WINREG if this operand could fit after reloads
3205 provided the constraint allows some registers. */
3208 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3210 case '\0':
3211 len = 0;
3212 break;
3213 case ',':
3214 c = '\0';
3215 break;
3217 case '?':
3218 reject += 6;
3219 break;
3221 case '!':
3222 reject = 600;
3223 break;
3225 case '#':
3226 /* Ignore rest of this alternative as far as
3227 reloading is concerned. */
3229 p++;
3230 while (*p && *p != ',');
3231 len = 0;
3232 break;
3234 case '0': case '1': case '2': case '3': case '4':
3235 case '5': case '6': case '7': case '8': case '9':
3236 m = strtoul (p, &end, 10);
3237 p = end;
3238 len = 0;
3240 this_alternative_matches[i] = m;
3241 /* We are supposed to match a previous operand.
3242 If we do, we win if that one did.
3243 If we do not, count both of the operands as losers.
3244 (This is too conservative, since most of the time
3245 only a single reload insn will be needed to make
3246 the two operands win. As a result, this alternative
3247 may be rejected when it is actually desirable.) */
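/* A hedged worked example of the matching logic below: with
   COMMUTATIVE == 1, operands 1 and 2 form the commutative pair.  A
   digit constraint such as "0" on a later operand means it must
   match operand 0.  When the swapped ordering of the pair is being
   tried, an index M inside the pair is reflected across the
   diagonal as 2 * COMMUTATIVE + 1 - M, so 1 becomes 2 and 2 becomes
   1; this lets the code reuse the operands_match matrix computed
   for the unswapped order instead of recomputing it.  */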
3248 if ((swapped && (m != commutative || i != commutative + 1))
3249 /* If we are matching as if two operands were swapped,
3250 also pretend that operands_match had been computed
3251 with swapped.
3252 But if I is the second of those and C is the first,
3253 don't exchange them, because operands_match is valid
3254 only on one side of its diagonal. */
3255 ? (operands_match
3256 [(m == commutative || m == commutative + 1)
3257 ? 2 * commutative + 1 - m : m]
3258 [(i == commutative || i == commutative + 1)
3259 ? 2 * commutative + 1 - i : i])
3260 : operands_match[m][i])
3262 /* If we are matching a non-offsettable address where an
3263 offsettable address was expected, then we must reject
3264 this combination, because we can't reload it. */
3265 if (this_alternative_offmemok[m]
3266 && MEM_P (recog_data.operand[m])
3267 && this_alternative[m] == NO_REGS
3268 && ! this_alternative_win[m])
3269 bad = 1;
3271 did_match = this_alternative_win[m];
3273 else
3275 /* Operands don't match. */
3276 rtx value;
3277 int loc1, loc2;
3278 /* Retroactively mark the operand we had to match
3279 as a loser, if it wasn't already. */
3280 if (this_alternative_win[m])
3281 losers++;
3282 this_alternative_win[m] = 0;
3283 if (this_alternative[m] == NO_REGS)
3284 bad = 1;
3285 /* But count the pair only once in the total badness of
3286 this alternative, if the pair can be a dummy reload.
3287 The pointers in operand_loc are not swapped; swap
3288 them by hand if necessary. */
3289 if (swapped && i == commutative)
3290 loc1 = commutative + 1;
3291 else if (swapped && i == commutative + 1)
3292 loc1 = commutative;
3293 else
3294 loc1 = i;
3295 if (swapped && m == commutative)
3296 loc2 = commutative + 1;
3297 else if (swapped && m == commutative + 1)
3298 loc2 = commutative;
3299 else
3300 loc2 = m;
3301 value
3302 = find_dummy_reload (recog_data.operand[i],
3303 recog_data.operand[m],
3304 recog_data.operand_loc[loc1],
3305 recog_data.operand_loc[loc2],
3306 operand_mode[i], operand_mode[m],
3307 this_alternative[m], -1,
3308 this_alternative_earlyclobber[m]);
3310 if (value != 0)
3311 losers--;
3313 /* This can be fixed with reloads if the operand
3314 we are supposed to match can be fixed with reloads. */
3315 badop = 0;
3316 this_alternative[i] = this_alternative[m];
3318 /* If we have to reload this operand and some previous
3319 operand also had to match the same thing as this
3320 operand, we don't know how to do that. So reject this
3321 alternative. */
3322 if (! did_match || force_reload)
3323 for (j = 0; j < i; j++)
3324 if (this_alternative_matches[j]
3325 == this_alternative_matches[i])
3327 badop = 1;
3328 break;
3330 break;
3332 case 'p':
3333 /* All necessary reloads for an address_operand
3334 were handled in find_reloads_address. */
3335 this_alternative[i]
3336 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3337 ADDRESS, SCRATCH);
3338 win = 1;
3339 badop = 0;
3340 break;
3342 case TARGET_MEM_CONSTRAINT:
3343 if (force_reload)
3344 break;
3345 if (MEM_P (operand)
3346 || (REG_P (operand)
3347 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3348 && reg_renumber[REGNO (operand)] < 0))
3349 win = 1;
3350 if (CONST_POOL_OK_P (operand_mode[i], operand))
3351 badop = 0;
3352 constmemok = 1;
3353 break;
3355 case '<':
3356 if (MEM_P (operand)
3357 && ! address_reloaded[i]
3358 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3359 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3360 win = 1;
3361 break;
3363 case '>':
3364 if (MEM_P (operand)
3365 && ! address_reloaded[i]
3366 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3367 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3368 win = 1;
3369 break;
3371 /* Memory operand whose address is not offsettable. */
3372 case 'V':
3373 if (force_reload)
3374 break;
3375 if (MEM_P (operand)
3376 && ! (ind_levels ? offsettable_memref_p (operand)
3377 : offsettable_nonstrict_memref_p (operand))
3378 /* Certain mem addresses will become offsettable
3379 after they themselves are reloaded. This is important;
3380 we don't want our own handling of unoffsettables
3381 to override the handling of reg_equiv_address. */
3382 && !(REG_P (XEXP (operand, 0))
3383 && (ind_levels == 0
3384 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3385 win = 1;
3386 break;
3388 /* Memory operand whose address is offsettable. */
3389 case 'o':
3390 if (force_reload)
3391 break;
3392 if ((MEM_P (operand)
3393 /* If IND_LEVELS, find_reloads_address won't reload a
3394 pseudo that didn't get a hard reg, so we have to
3395 reject that case. */
3396 && ((ind_levels ? offsettable_memref_p (operand)
3397 : offsettable_nonstrict_memref_p (operand))
3398 /* A reloaded address is offsettable because it is now
3399 just a simple register indirect. */
3400 || address_reloaded[i] == 1))
3401 || (REG_P (operand)
3402 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3403 && reg_renumber[REGNO (operand)] < 0
3404 /* If reg_equiv_address is nonzero, we will be
3405 loading it into a register; hence it will be
3406 offsettable, but we cannot say that reg_equiv_mem
3407 is offsettable without checking. */
3408 && ((reg_equiv_mem (REGNO (operand)) != 0
3409 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3410 || (reg_equiv_address (REGNO (operand)) != 0))))
3411 win = 1;
3412 if (CONST_POOL_OK_P (operand_mode[i], operand)
3413 || MEM_P (operand))
3414 badop = 0;
3415 constmemok = 1;
3416 offmemok = 1;
3417 break;
3419 case '&':
3420 /* Output operand that is stored before the need for the
3421 input operands (and their index registers) is over. */
3422 earlyclobber = 1, this_earlyclobber = 1;
3423 break;
3425 case 'X':
3426 force_reload = 0;
3427 win = 1;
3428 break;
3430 case 'g':
3431 if (! force_reload
3432 /* A PLUS is never a valid operand, but reload can make
3433 it from a register when eliminating registers. */
3434 && GET_CODE (operand) != PLUS
3435 /* A SCRATCH is not a valid operand. */
3436 && GET_CODE (operand) != SCRATCH
3437 && (! CONSTANT_P (operand)
3438 || ! flag_pic
3439 || LEGITIMATE_PIC_OPERAND_P (operand))
3440 && (GENERAL_REGS == ALL_REGS
3441 || !REG_P (operand)
3442 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3443 && reg_renumber[REGNO (operand)] < 0)))
3444 win = 1;
3445 cl = GENERAL_REGS;
3446 goto reg;
3448 default:
3449 cn = lookup_constraint (p);
3450 switch (get_constraint_type (cn))
3452 case CT_REGISTER:
3453 cl = reg_class_for_constraint (cn);
3454 if (cl != NO_REGS)
3455 goto reg;
3456 break;
3458 case CT_CONST_INT:
3459 if (CONST_INT_P (operand)
3460 && (insn_const_int_ok_for_constraint
3461 (INTVAL (operand), cn)))
3462 win = true;
3463 break;
3465 case CT_MEMORY:
3466 if (force_reload)
3467 break;
3468 if (constraint_satisfied_p (operand, cn))
3469 win = 1;
3470 /* If the address was already reloaded,
3471 we win as well. */
3472 else if (MEM_P (operand) && address_reloaded[i] == 1)
3473 win = 1;
3474 /* Likewise if the address will be reloaded because
3475 reg_equiv_address is nonzero. For reg_equiv_mem
3476 we have to check. */
3477 else if (REG_P (operand)
3478 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3479 && reg_renumber[REGNO (operand)] < 0
3480 && ((reg_equiv_mem (REGNO (operand)) != 0
3481 && (constraint_satisfied_p
3482 (reg_equiv_mem (REGNO (operand)),
3483 cn)))
3484 || (reg_equiv_address (REGNO (operand))
3485 != 0)))
3486 win = 1;
3488 /* If we didn't already win, we can reload
3489 constants via force_const_mem, and other
3490 MEMs by reloading the address like for 'o'. */
3491 if (CONST_POOL_OK_P (operand_mode[i], operand)
3492 || MEM_P (operand))
3493 badop = 0;
3494 constmemok = 1;
3495 offmemok = 1;
3496 break;
3498 case CT_ADDRESS:
3499 if (constraint_satisfied_p (operand, cn))
3500 win = 1;
3502 /* If we didn't already win, we can reload
3503 the address into a base register. */
3504 this_alternative[i]
3505 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3506 ADDRESS, SCRATCH);
3507 badop = 0;
3508 break;
3510 case CT_FIXED_FORM:
3511 if (constraint_satisfied_p (operand, cn))
3512 win = 1;
3513 break;
3515 break;
3517 reg:
3518 this_alternative[i]
3519 = reg_class_subunion[this_alternative[i]][cl];
3520 if (GET_MODE (operand) == BLKmode)
3521 break;
3522 winreg = 1;
3523 if (REG_P (operand)
3524 && reg_fits_class_p (operand, this_alternative[i],
3525 offset, GET_MODE (recog_data.operand[i])))
3526 win = 1;
3527 break;
3529 while ((p += len), c);
3531 if (swapped == (commutative >= 0 ? 1 : 0))
3532 constraints[i] = p;
3534 /* If this operand could be handled with a reg,
3535 and some reg is allowed, then this operand can be handled. */
3536 if (winreg && this_alternative[i] != NO_REGS
3537 && (win || !class_only_fixed_regs[this_alternative[i]]))
3538 badop = 0;
3540 /* Record which operands fit this alternative. */
3541 this_alternative_earlyclobber[i] = earlyclobber;
3542 if (win && ! force_reload)
3543 this_alternative_win[i] = 1;
3544 else if (did_match && ! force_reload)
3545 this_alternative_match_win[i] = 1;
3546 else
3548 int const_to_mem = 0;
3550 this_alternative_offmemok[i] = offmemok;
3551 losers++;
3552 if (badop)
3553 bad = 1;
3554 /* Alternative loses if it has no regs for a reg operand. */
3555 if (REG_P (operand)
3556 && this_alternative[i] == NO_REGS
3557 && this_alternative_matches[i] < 0)
3558 bad = 1;
3560 /* If this is a constant that is reloaded into the desired
3561 class by copying it to memory first, count that as another
3562 reload. This is consistent with other code and is
3563 required to avoid choosing another alternative when
3564 the constant is moved into memory by this function on
3565 an early reload pass. Note that the test here is
3566 precisely the same as in the code below that calls
3567 force_const_mem. */
3568 if (CONST_POOL_OK_P (operand_mode[i], operand)
3569 && ((targetm.preferred_reload_class (operand,
3570 this_alternative[i])
3571 == NO_REGS)
3572 || no_input_reloads))
3574 const_to_mem = 1;
3575 if (this_alternative[i] != NO_REGS)
3576 losers++;
3579 /* Alternative loses if it requires a type of reload not
3580 permitted for this insn. We can always reload SCRATCH
3581 and objects with a REG_UNUSED note. */
3582 if (GET_CODE (operand) != SCRATCH
3583 && modified[i] != RELOAD_READ && no_output_reloads
3584 && ! find_reg_note (insn, REG_UNUSED, operand))
3585 bad = 1;
3586 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3587 && ! const_to_mem)
3588 bad = 1;
3590 /* If we can't reload this value at all, reject this
3591 alternative. Note that we could also lose due to
3592 LIMIT_RELOAD_CLASS, but we don't check that
3593 here. */
3595 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3597 if (targetm.preferred_reload_class (operand,
3598 this_alternative[i])
3599 == NO_REGS)
3600 reject = 600;
3602 if (operand_type[i] == RELOAD_FOR_OUTPUT
3603 && (targetm.preferred_output_reload_class (operand,
3604 this_alternative[i])
3605 == NO_REGS))
3606 reject = 600;
3609 /* We prefer to reload pseudos over reloading other things,
3610 since such reloads may be able to be eliminated later.
3611 If we are reloading a SCRATCH, we won't be generating any
3612 insns, just using a register, so it is also preferred.
3613 So bump REJECT in other cases. Don't do this in the
3614 case where we are forcing a constant into memory and
3615 it will then win, since we don't want a different
3616 alternative to match in that case. */
3617 if (! (REG_P (operand)
3618 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3619 && GET_CODE (operand) != SCRATCH
3620 && ! (const_to_mem && constmemok))
3621 reject += 2;
3623 /* Input reloads can be inherited more often than output
3624 reloads can be removed, so penalize output reloads. */
3625 if (operand_type[i] != RELOAD_FOR_INPUT
3626 && GET_CODE (operand) != SCRATCH)
3627 reject++;
3630 /* If this operand is a pseudo register that didn't get
3631 a hard reg and this alternative accepts some
3632 register, see if the class that we want is a subset
3633 of the preferred class for this register. If not,
3634 but it intersects that class, use the preferred class
3635 instead. If it does not intersect the preferred
3636 class, show that usage of this alternative should be
3637 discouraged; it will be discouraged more still if the
3638 register is `preferred or nothing'. We do this
3639 because it increases the chance of reusing our spill
3640 register in a later insn and avoiding a pair of
3641 memory stores and loads.
3643 Don't bother with this if this alternative will
3644 accept this operand.
3646 Don't do this for a multiword operand, since it is
3647 only a small win and has the risk of requiring more
3648 spill registers, which could cause a large loss.
3650 Don't do this if the preferred class has only one
3651 register because we might otherwise exhaust the
3652 class. */
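/* As a sketch of the heuristic just described: if the alternative's
   class is already contained in the pseudo's preferred class,
   nothing needs to change; if instead the preferred class is a
   subset of the alternative's class (say the alternative allows all
   of GENERAL_REGS but the pseudo prefers a smaller base-register
   class), the smaller preferred class is used so a later insn is
   more likely to reuse the same spill register; otherwise REJECT is
   bumped, and bumped harder when the register is "preferred or
   nothing".  */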
3654 if (! win && ! did_match
3655 && this_alternative[i] != NO_REGS
3656 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3657 && reg_class_size [(int) preferred_class[i]] > 0
3658 && ! small_register_class_p (preferred_class[i]))
3660 if (! reg_class_subset_p (this_alternative[i],
3661 preferred_class[i]))
3663 /* Since we don't have a way of forming the intersection,
3664 we just do something special if the preferred class
3665 is a subset of the class we have; that's the most
3666 common case anyway. */
3667 if (reg_class_subset_p (preferred_class[i],
3668 this_alternative[i]))
3669 this_alternative[i] = preferred_class[i];
3670 else
3671 reject += (2 + 2 * pref_or_nothing[i]);
3676 /* Now see if any output operands that are marked "earlyclobber"
3677 in this alternative conflict with any input operands
3678 or any memory addresses. */
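/* For illustration only: the typical conflict caught here is an
   output constrained "=&r" paired with an input that mentions the
   same pseudo, or with a memory operand whose address uses it.
   Since an earlyclobber output may be written before the inputs are
   consumed, one side of such a pair must go through a separate
   reload register; the loop below decides which side is made to
   lose.  */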
3680 for (i = 0; i < noperands; i++)
3681 if (this_alternative_earlyclobber[i]
3682 && (this_alternative_win[i] || this_alternative_match_win[i]))
3684 struct decomposition early_data;
3686 early_data = decompose (recog_data.operand[i]);
3688 gcc_assert (modified[i] != RELOAD_READ);
3690 if (this_alternative[i] == NO_REGS)
3692 this_alternative_earlyclobber[i] = 0;
3693 gcc_assert (this_insn_is_asm);
3694 error_for_asm (this_insn,
3695 "%<&%> constraint used with no register class");
3698 for (j = 0; j < noperands; j++)
3699 /* Is this an input operand or a memory ref? */
3700 if ((MEM_P (recog_data.operand[j])
3701 || modified[j] != RELOAD_WRITE)
3702 && j != i
3703 /* Ignore things like match_operator operands. */
3704 && !recog_data.is_operator[j]
3705 /* Don't count an input operand that is constrained to match
3706 the early clobber operand. */
3707 && ! (this_alternative_matches[j] == i
3708 && rtx_equal_p (recog_data.operand[i],
3709 recog_data.operand[j]))
3710 /* Is it altered by storing the earlyclobber operand? */
3711 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3712 early_data))
3714 /* If the output is in a non-empty few-regs class,
3715 it's costly to reload it, so reload the input instead. */
3716 if (small_register_class_p (this_alternative[i])
3717 && (REG_P (recog_data.operand[j])
3718 || GET_CODE (recog_data.operand[j]) == SUBREG))
3720 losers++;
3721 this_alternative_win[j] = 0;
3722 this_alternative_match_win[j] = 0;
3724 else
3725 break;
3727 /* If an earlyclobber operand conflicts with something,
3728 it must be reloaded, so request this and count the cost. */
3729 if (j != noperands)
3731 losers++;
3732 this_alternative_win[i] = 0;
3733 this_alternative_match_win[j] = 0;
3734 for (j = 0; j < noperands; j++)
3735 if (this_alternative_matches[j] == i
3736 && this_alternative_match_win[j])
3738 this_alternative_win[j] = 0;
3739 this_alternative_match_win[j] = 0;
3740 losers++;
3745 /* If one alternative accepts all the operands, no reload required,
3746 choose that alternative; don't consider the remaining ones. */
3747 if (losers == 0)
3749 /* Unswap these so that they are never swapped at `finish'. */
3750 if (swapped)
3752 recog_data.operand[commutative] = substed_operand[commutative];
3753 recog_data.operand[commutative + 1]
3754 = substed_operand[commutative + 1];
3756 for (i = 0; i < noperands; i++)
3758 goal_alternative_win[i] = this_alternative_win[i];
3759 goal_alternative_match_win[i] = this_alternative_match_win[i];
3760 goal_alternative[i] = this_alternative[i];
3761 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3762 goal_alternative_matches[i] = this_alternative_matches[i];
3763 goal_alternative_earlyclobber[i]
3764 = this_alternative_earlyclobber[i];
3766 goal_alternative_number = this_alternative_number;
3767 goal_alternative_swapped = swapped;
3768 goal_earlyclobber = this_earlyclobber;
3769 goto finish;
3772 /* REJECT, set by the ! and ? constraint characters and when a register
3773 would be reloaded into a non-preferred class, discourages the use of
3774 this alternative for a reload goal. REJECT is incremented by six
3775 for each ? and two for each non-preferred class. */
3776 losers = losers * 6 + reject;
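/* A small worked example of this scoring, for illustration: an
   alternative needing two reloads and carrying one '?' scores
   2 * 6 + 6 = 18, so it still beats an alternative needing four
   reloads with no '?' (4 * 6 = 24) but loses to one needing a
   single reload (1 * 6 = 6).  A '!' contributes 600 and thus
   effectively disables its alternative unless nothing else can be
   made to work.  */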
3778 /* If this alternative can be made to work by reloading,
3779 and it needs less reloading than the others checked so far,
3780 record it as the chosen goal for reloading. */
3781 if (! bad)
3783 if (best > losers)
3785 for (i = 0; i < noperands; i++)
3787 goal_alternative[i] = this_alternative[i];
3788 goal_alternative_win[i] = this_alternative_win[i];
3789 goal_alternative_match_win[i]
3790 = this_alternative_match_win[i];
3791 goal_alternative_offmemok[i]
3792 = this_alternative_offmemok[i];
3793 goal_alternative_matches[i] = this_alternative_matches[i];
3794 goal_alternative_earlyclobber[i]
3795 = this_alternative_earlyclobber[i];
3797 goal_alternative_swapped = swapped;
3798 best = losers;
3799 goal_alternative_number = this_alternative_number;
3800 goal_earlyclobber = this_earlyclobber;
3804 if (swapped)
3806 enum reg_class tclass;
3807 int t;
3809 /* If the commutative operands have been swapped, swap
3810 them back in order to check the next alternative. */
3811 recog_data.operand[commutative] = substed_operand[commutative];
3812 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3813 /* Unswap the duplicates too. */
3814 for (i = 0; i < recog_data.n_dups; i++)
3815 if (recog_data.dup_num[i] == commutative
3816 || recog_data.dup_num[i] == commutative + 1)
3817 *recog_data.dup_loc[i]
3818 = recog_data.operand[(int) recog_data.dup_num[i]];
3820 /* Unswap the operand related information as well. */
3821 tclass = preferred_class[commutative];
3822 preferred_class[commutative] = preferred_class[commutative + 1];
3823 preferred_class[commutative + 1] = tclass;
3825 t = pref_or_nothing[commutative];
3826 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3827 pref_or_nothing[commutative + 1] = t;
3829 t = address_reloaded[commutative];
3830 address_reloaded[commutative] = address_reloaded[commutative + 1];
3831 address_reloaded[commutative + 1] = t;
3836 /* The operands don't meet the constraints.
3837 goal_alternative describes the alternative
3838 that we could reach by reloading the fewest operands.
3839 Reload so as to fit it. */
3841 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3843 /* No alternative works with reloads?? */
3844 if (insn_code_number >= 0)
3845 fatal_insn ("unable to generate reloads for:", insn);
3846 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3847 /* Avoid further trouble with this insn. */
3848 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3849 n_reloads = 0;
3850 return 0;
3853 /* Jump to `finish' from above if all operands are valid already.
3854 In that case, goal_alternative_win is all 1. */
3855 finish:
3857 /* Right now, for any pair of operands I and J that are required to match,
3858 with I < J,
3859 goal_alternative_matches[J] is I.
3860 Set up goal_alternative_matched as the inverse function:
3861 goal_alternative_matched[I] = J. */
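/* For example, if operand 2 used the digit constraint "0" and did
   not win, then goal_alternative_matches[2] == 0 and the loops
   below record the inverse as goal_alternative_matched[0] == 2, so
   that the reload-pushing code further down can pair the two
   operands in a single reload.  */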
3863 for (i = 0; i < noperands; i++)
3864 goal_alternative_matched[i] = -1;
3866 for (i = 0; i < noperands; i++)
3867 if (! goal_alternative_win[i]
3868 && goal_alternative_matches[i] >= 0)
3869 goal_alternative_matched[goal_alternative_matches[i]] = i;
3871 for (i = 0; i < noperands; i++)
3872 goal_alternative_win[i] |= goal_alternative_match_win[i];
3874 /* If the best alternative is with operands 1 and 2 swapped,
3875 consider them swapped before reporting the reloads. Update the
3876 operand numbers of any reloads already pushed. */
3878 if (goal_alternative_swapped)
3880 rtx tem;
3882 tem = substed_operand[commutative];
3883 substed_operand[commutative] = substed_operand[commutative + 1];
3884 substed_operand[commutative + 1] = tem;
3885 tem = recog_data.operand[commutative];
3886 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3887 recog_data.operand[commutative + 1] = tem;
3888 tem = *recog_data.operand_loc[commutative];
3889 *recog_data.operand_loc[commutative]
3890 = *recog_data.operand_loc[commutative + 1];
3891 *recog_data.operand_loc[commutative + 1] = tem;
3893 for (i = 0; i < n_reloads; i++)
3895 if (rld[i].opnum == commutative)
3896 rld[i].opnum = commutative + 1;
3897 else if (rld[i].opnum == commutative + 1)
3898 rld[i].opnum = commutative;
3902 for (i = 0; i < noperands; i++)
3904 operand_reloadnum[i] = -1;
3906 /* If this is an earlyclobber operand, we need to widen the scope.
3907 The reload must remain valid from the start of the insn being
3908 reloaded until after the operand is stored into its destination.
3909 We approximate this with RELOAD_OTHER even though we know that we
3910 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3912 One special case that is worth checking is when we have an
3913 output that is earlyclobber but isn't used past the insn (typically
3914 a SCRATCH). In this case, we need only have the reload live
3915 through the insn itself, but not for any of our input or output
3916 reloads.
3917 But we must not accidentally narrow the scope of an existing
3918 RELOAD_OTHER reload - leave these alone.
3920 In any case, anything needed to address this operand can remain
3921 however it was previously categorized. */
3923 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3924 operand_type[i]
3925 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3926 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3929 /* Any constants that aren't allowed and can't be reloaded
3930 into registers are here changed into memory references. */
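/* A hedged example of what this loop does: a floating-point
   constant appearing where the chosen alternative only accepts a
   register class that the target will not load directly from an
   immediate (targetm.preferred_reload_class returns NO_REGS) is
   written to the constant pool via force_const_mem, and the operand
   is rewritten as a memory reference whose address may in turn need
   reloading.  */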
3931 for (i = 0; i < noperands; i++)
3932 if (! goal_alternative_win[i])
3934 rtx op = recog_data.operand[i];
3935 rtx subreg = NULL_RTX;
3936 rtx plus = NULL_RTX;
3937 machine_mode mode = operand_mode[i];
3939 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3940 push_reload so we have to let them pass here. */
3941 if (GET_CODE (op) == SUBREG)
3943 subreg = op;
3944 op = SUBREG_REG (op);
3945 mode = GET_MODE (op);
3948 if (GET_CODE (op) == PLUS)
3950 plus = op;
3951 op = XEXP (op, 1);
3954 if (CONST_POOL_OK_P (mode, op)
3955 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3956 == NO_REGS)
3957 || no_input_reloads))
3959 int this_address_reloaded;
3960 rtx tem = force_const_mem (mode, op);
3962 /* If we stripped a SUBREG or a PLUS above add it back. */
3963 if (plus != NULL_RTX)
3964 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3966 if (subreg != NULL_RTX)
3967 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3969 this_address_reloaded = 0;
3970 substed_operand[i] = recog_data.operand[i]
3971 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3972 0, insn, &this_address_reloaded);
3974 /* If the alternative accepts constant pool refs directly
3975 there will be no reload needed at all. */
3976 if (plus == NULL_RTX
3977 && subreg == NULL_RTX
3978 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3979 ? substed_operand[i]
3980 : NULL,
3981 recog_data.constraints[i],
3982 goal_alternative_number))
3983 goal_alternative_win[i] = 1;
3987 /* Record the values of the earlyclobber operands for the caller. */
3988 if (goal_earlyclobber)
3989 for (i = 0; i < noperands; i++)
3990 if (goal_alternative_earlyclobber[i])
3991 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3993 /* Now record reloads for all the operands that need them. */
3994 for (i = 0; i < noperands; i++)
3995 if (! goal_alternative_win[i])
3997 /* Operands that match previous ones have already been handled. */
3998 if (goal_alternative_matches[i] >= 0)
4000 /* Handle an operand with a nonoffsettable address
4001 appearing where an offsettable address will do
4002 by reloading the address into a base register.
4004 ??? We can also do this when the operand is a register and
4005 reg_equiv_mem is not offsettable, but this is a bit tricky,
4006 so we don't bother with it. It may not be worth doing. */
4007 else if (goal_alternative_matched[i] == -1
4008 && goal_alternative_offmemok[i]
4009 && MEM_P (recog_data.operand[i]))
4011 /* If the address to be reloaded is a VOIDmode constant,
4012 use the default address mode as the mode of the reload register,
4013 as would have been done by find_reloads_address. */
4014 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4015 machine_mode address_mode;
4017 address_mode = get_address_mode (recog_data.operand[i]);
4018 operand_reloadnum[i]
4019 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4020 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4021 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4022 address_mode,
4023 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4024 rld[operand_reloadnum[i]].inc
4025 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4027 /* If this operand is an output, we will have made any
4028 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4029 now we are treating part of the operand as an input, so
4030 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4032 if (modified[i] == RELOAD_WRITE)
4034 for (j = 0; j < n_reloads; j++)
4036 if (rld[j].opnum == i)
4038 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4039 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4040 else if (rld[j].when_needed
4041 == RELOAD_FOR_OUTADDR_ADDRESS)
4042 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4047 else if (goal_alternative_matched[i] == -1)
4049 operand_reloadnum[i]
4050 = push_reload ((modified[i] != RELOAD_WRITE
4051 ? recog_data.operand[i] : 0),
4052 (modified[i] != RELOAD_READ
4053 ? recog_data.operand[i] : 0),
4054 (modified[i] != RELOAD_WRITE
4055 ? recog_data.operand_loc[i] : 0),
4056 (modified[i] != RELOAD_READ
4057 ? recog_data.operand_loc[i] : 0),
4058 (enum reg_class) goal_alternative[i],
4059 (modified[i] == RELOAD_WRITE
4060 ? VOIDmode : operand_mode[i]),
4061 (modified[i] == RELOAD_READ
4062 ? VOIDmode : operand_mode[i]),
4063 (insn_code_number < 0 ? 0
4064 : insn_data[insn_code_number].operand[i].strict_low),
4065 0, i, operand_type[i]);
4067 /* In a matching pair of operands, one must be input only
4068 and the other must be output only.
4069 Pass the input operand as IN and the other as OUT. */
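/* Concretely (a sketch of the common case): for a two-address
   pattern whose output constraint is "=r" and whose second input
   carries the matching constraint "0", the pair is handled by one
   RELOAD_OTHER reload in which the input operand is IN and the
   matched output operand is OUT, so a single reload register
   carries the value both into and out of the insn.  */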
4070 else if (modified[i] == RELOAD_READ
4071 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4073 operand_reloadnum[i]
4074 = push_reload (recog_data.operand[i],
4075 recog_data.operand[goal_alternative_matched[i]],
4076 recog_data.operand_loc[i],
4077 recog_data.operand_loc[goal_alternative_matched[i]],
4078 (enum reg_class) goal_alternative[i],
4079 operand_mode[i],
4080 operand_mode[goal_alternative_matched[i]],
4081 0, 0, i, RELOAD_OTHER);
4082 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4084 else if (modified[i] == RELOAD_WRITE
4085 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4087 operand_reloadnum[goal_alternative_matched[i]]
4088 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4089 recog_data.operand[i],
4090 recog_data.operand_loc[goal_alternative_matched[i]],
4091 recog_data.operand_loc[i],
4092 (enum reg_class) goal_alternative[i],
4093 operand_mode[goal_alternative_matched[i]],
4094 operand_mode[i],
4095 0, 0, i, RELOAD_OTHER);
4096 operand_reloadnum[i] = output_reloadnum;
4098 else
4100 gcc_assert (insn_code_number < 0);
4101 error_for_asm (insn, "inconsistent operand constraints "
4102 "in an %<asm%>");
4103 /* Avoid further trouble with this insn. */
4104 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4105 n_reloads = 0;
4106 return 0;
4109 else if (goal_alternative_matched[i] < 0
4110 && goal_alternative_matches[i] < 0
4111 && address_operand_reloaded[i] != 1
4112 && optimize)
4114 /* For each non-matching operand that's a MEM or a pseudo-register
4115 that didn't get a hard register, make an optional reload.
4116 This may get done even if the insn needs no reloads otherwise. */
4118 rtx operand = recog_data.operand[i];
4120 while (GET_CODE (operand) == SUBREG)
4121 operand = SUBREG_REG (operand);
4122 if ((MEM_P (operand)
4123 || (REG_P (operand)
4124 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4125 /* If this is only for an output, the optional reload would not
4126 actually cause us to use a register now, just note that
4127 something is stored here. */
4128 && (goal_alternative[i] != NO_REGS
4129 || modified[i] == RELOAD_WRITE)
4130 && ! no_input_reloads
4131 An optional output reload might make it possible to delete INSN later.
4132 We mustn't make in-out reloads on insns that are not permitted
4133 output reloads.
4134 If this is an asm, we can't delete it; we must not even call
4135 push_reload for an optional output reload in this case,
4136 because we can't be sure that the constraint allows a register,
4137 and push_reload verifies the constraints for asms. */
4138 && (modified[i] == RELOAD_READ
4139 || (! no_output_reloads && ! this_insn_is_asm)))
4140 operand_reloadnum[i]
4141 = push_reload ((modified[i] != RELOAD_WRITE
4142 ? recog_data.operand[i] : 0),
4143 (modified[i] != RELOAD_READ
4144 ? recog_data.operand[i] : 0),
4145 (modified[i] != RELOAD_WRITE
4146 ? recog_data.operand_loc[i] : 0),
4147 (modified[i] != RELOAD_READ
4148 ? recog_data.operand_loc[i] : 0),
4149 (enum reg_class) goal_alternative[i],
4150 (modified[i] == RELOAD_WRITE
4151 ? VOIDmode : operand_mode[i]),
4152 (modified[i] == RELOAD_READ
4153 ? VOIDmode : operand_mode[i]),
4154 (insn_code_number < 0 ? 0
4155 : insn_data[insn_code_number].operand[i].strict_low),
4156 1, i, operand_type[i]);
4157 /* If a memory reference remains (either as a MEM or a pseudo that
4158 did not get a hard register), yet we can't make an optional
4159 reload, check if this is actually a pseudo register reference;
4160 we then need to emit a USE and/or a CLOBBER so that reload
4161 inheritance will do the right thing. */
4162 else if (replace
4163 && (MEM_P (operand)
4164 || (REG_P (operand)
4165 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4166 && reg_renumber [REGNO (operand)] < 0)))
4168 operand = *recog_data.operand_loc[i];
4170 while (GET_CODE (operand) == SUBREG)
4171 operand = SUBREG_REG (operand);
4172 if (REG_P (operand))
4174 if (modified[i] != RELOAD_WRITE)
4175 /* We mark the USE with QImode so that we recognize
4176 it as one that can be safely deleted at the end
4177 of reload. */
4178 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4179 insn), QImode);
4180 if (modified[i] != RELOAD_READ)
4181 emit_insn_after (gen_clobber (operand), insn);
4185 else if (goal_alternative_matches[i] >= 0
4186 && goal_alternative_win[goal_alternative_matches[i]]
4187 && modified[i] == RELOAD_READ
4188 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4189 && ! no_input_reloads && ! no_output_reloads
4190 && optimize)
4192 /* Similarly, make an optional reload for a pair of matching
4193 objects that are in MEM or a pseudo that didn't get a hard reg. */
4195 rtx operand = recog_data.operand[i];
4197 while (GET_CODE (operand) == SUBREG)
4198 operand = SUBREG_REG (operand);
4199 if ((MEM_P (operand)
4200 || (REG_P (operand)
4201 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4202 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4203 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4204 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4205 recog_data.operand[i],
4206 recog_data.operand_loc[goal_alternative_matches[i]],
4207 recog_data.operand_loc[i],
4208 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4209 operand_mode[goal_alternative_matches[i]],
4210 operand_mode[i],
4211 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4214 /* Perform whatever substitutions on the operands we are supposed
4215 to make due to commutativity or replacement of registers
4216 with equivalent constants or memory slots. */
4218 for (i = 0; i < noperands; i++)
4220 /* We only do this on the last pass through reload, because it is
4221 possible for some data (like reg_equiv_address) to be changed during
4222 later passes. Moreover, we lose the opportunity to get a useful
4223 reload_{in,out}_reg when we do these replacements. */
4225 if (replace)
4227 rtx substitution = substed_operand[i];
4229 *recog_data.operand_loc[i] = substitution;
4231 /* If we're replacing an operand with a LABEL_REF, we need to
4232 make sure that there's a REG_LABEL_OPERAND note attached to
4233 this instruction. */
4234 if (GET_CODE (substitution) == LABEL_REF
4235 && !find_reg_note (insn, REG_LABEL_OPERAND,
4236 LABEL_REF_LABEL (substitution))
4237 /* For a JUMP_P, if it was a branch target it must have
4238 already been recorded as such. */
4239 && (!JUMP_P (insn)
4240 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4241 insn)))
4243 add_reg_note (insn, REG_LABEL_OPERAND,
4244 LABEL_REF_LABEL (substitution));
4245 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4246 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4250 else
4251 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4254 /* If this insn pattern contains any MATCH_DUP's, make sure that
4255 they will be substituted if the operands they match are substituted.
4256 Also do now any substitutions we already did on the operands.
4258 Don't do this if we aren't making replacements because we might be
4259 propagating things allocated by frame pointer elimination into places
4260 it doesn't expect. */
4262 if (insn_code_number >= 0 && replace)
4263 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4265 int opno = recog_data.dup_num[i];
4266 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4267 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4270 #if 0
4271 /* This loses because reloading of prior insns can invalidate the equivalence
4272 (or at least find_equiv_reg isn't smart enough to find it any more),
4273 causing this insn to need more reload regs than it needed before.
4274 It may be too late to make the reload regs available.
4275 Now this optimization is done safely in choose_reload_regs. */
4277 /* For each reload of a reg into some other class of reg,
4278 search for an existing equivalent reg (same value now) in the right class.
4279 We can use it as long as we don't need to change its contents. */
4280 for (i = 0; i < n_reloads; i++)
4281 if (rld[i].reg_rtx == 0
4282 && rld[i].in != 0
4283 && REG_P (rld[i].in)
4284 && rld[i].out == 0)
4286 rld[i].reg_rtx
4287 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4288 static_reload_reg_p, 0, rld[i].inmode);
4289 /* Prevent generation of insn to load the value
4290 because the one we found already has the value. */
4291 if (rld[i].reg_rtx)
4292 rld[i].in = rld[i].reg_rtx;
4294 #endif
4296 /* If we detected an error and replaced the asm instruction with a USE,
4297 forget about the reloads. */
4298 if (GET_CODE (PATTERN (insn)) == USE
4299 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4300 n_reloads = 0;
4302 /* Perhaps an output reload can be combined with another
4303 to reduce needs by one. */
4304 if (!goal_earlyclobber)
4305 combine_reloads ();
4307 /* If we have a pair of reloads for parts of an address, they are reloading
4308 the same object, the operands themselves were not reloaded, and they
4309 are for two operands that are supposed to match, merge the reloads and
4310 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4312 for (i = 0; i < n_reloads; i++)
4314 int k;
4316 for (j = i + 1; j < n_reloads; j++)
4317 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4318 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4319 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4320 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4321 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4322 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4323 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4324 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4325 && rtx_equal_p (rld[i].in, rld[j].in)
4326 && (operand_reloadnum[rld[i].opnum] < 0
4327 || rld[operand_reloadnum[rld[i].opnum]].optional)
4328 && (operand_reloadnum[rld[j].opnum] < 0
4329 || rld[operand_reloadnum[rld[j].opnum]].optional)
4330 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4331 || (goal_alternative_matches[rld[j].opnum]
4332 == rld[i].opnum)))
4334 for (k = 0; k < n_replacements; k++)
4335 if (replacements[k].what == j)
4336 replacements[k].what = i;
4338 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4339 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4340 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4341 else
4342 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4343 rld[j].in = 0;
4347 /* Scan all the reloads and update their type.
4348 If a reload is for the address of an operand and we didn't reload
4349 that operand, change the type. Similarly, change the operand number
4350 of a reload when two operands match. If a reload is optional, treat it
4351 as though the operand isn't reloaded.
4353 ??? This latter case is somewhat odd because if we do the optional
4354 reload, it means the object is hanging around. Thus we need only
4355 do the address reload if the optional reload was NOT done.
4357 Change secondary reloads to be the address type of their operand, not
4358 the normal type.
4360 If an operand's reload is now RELOAD_OTHER, change any
4361 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4362 RELOAD_FOR_OTHER_ADDRESS. */
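/* As an illustration of the rescan below: if an operand was a MEM
   whose address needed reloading but the operand itself ended up
   not being reloaded (or only got an optional reload), its address
   reloads can no longer be tied to an input or output reload of
   that operand, so they are retyped as RELOAD_FOR_OPERAND_ADDRESS
   (or RELOAD_FOR_OPADDR_ADDR for the address-of-an-address
   reloads).  */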
4364 for (i = 0; i < n_reloads; i++)
4366 if (rld[i].secondary_p
4367 && rld[i].when_needed == operand_type[rld[i].opnum])
4368 rld[i].when_needed = address_type[rld[i].opnum];
4370 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4371 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4372 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4373 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4374 && (operand_reloadnum[rld[i].opnum] < 0
4375 || rld[operand_reloadnum[rld[i].opnum]].optional))
4377 /* If we have a secondary reload to go along with this reload,
4378 change its type to RELOAD_FOR_OPADDR_ADDR. */
4380 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4381 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4382 && rld[i].secondary_in_reload != -1)
4384 int secondary_in_reload = rld[i].secondary_in_reload;
4386 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4388 /* If there's a tertiary reload we have to change it also. */
4389 if (secondary_in_reload > 0
4390 && rld[secondary_in_reload].secondary_in_reload != -1)
4391 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4392 = RELOAD_FOR_OPADDR_ADDR;
4395 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4396 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4397 && rld[i].secondary_out_reload != -1)
4399 int secondary_out_reload = rld[i].secondary_out_reload;
4401 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4403 /* If there's a tertiary reload we have to change it also. */
4404 if (secondary_out_reload
4405 && rld[secondary_out_reload].secondary_out_reload != -1)
4406 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4407 = RELOAD_FOR_OPADDR_ADDR;
4410 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4411 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4412 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4413 else
4414 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4417 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4418 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4419 && operand_reloadnum[rld[i].opnum] >= 0
4420 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4421 == RELOAD_OTHER))
4422 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4424 if (goal_alternative_matches[rld[i].opnum] >= 0)
4425 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4428 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4429 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4430 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4432 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4433 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4434 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4435 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4436 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4437 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4438 This is complicated by the fact that a single operand can have more
4439 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4440 choose_reload_regs without affecting code quality, and cases that
4441 actually fail are extremely rare, so it turns out to be better to fix
4442 the problem here by not generating cases that choose_reload_regs will
4443 fail for. */
4444 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4445 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4446 a single operand.
4447 We can reduce the register pressure by exploiting that a
4448 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4449 does not conflict with any of them, if it is only used for the first of
4450 the RELOAD_FOR_X_ADDRESS reloads. */
4452 int first_op_addr_num = -2;
4453 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4454 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4455 int need_change = 0;
4456 /* We use first_op_addr_num and the contents of the above arrays
4457 first as flags - -2 means no instance encountered, -1 means exactly
4458 one instance encountered.
4459 If more than one instance has been encountered, we store the reload
4460 number of the first reload of the kind in question; reload numbers
4461 are known to be non-negative. */
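/* To illustrate the encoding: the counters start at -2, so a test
   like "if (++first_inpaddr_num[opnum] >= 0)" is false for the
   first RELOAD_FOR_INPUT_ADDRESS seen for an operand (the counter
   becomes -1) and true from the second one onwards, at which point
   the counter is overwritten with the current reload number.
   Because the scan runs from the last reload down to the first,
   the value left behind is the number of the first reload of that
   kind, and NEED_CHANGE records that more than one was seen.  */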
4462 for (i = 0; i < noperands; i++)
4463 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4464 for (i = n_reloads - 1; i >= 0; i--)
4466 switch (rld[i].when_needed)
4468 case RELOAD_FOR_OPERAND_ADDRESS:
4469 if (++first_op_addr_num >= 0)
4471 first_op_addr_num = i;
4472 need_change = 1;
4474 break;
4475 case RELOAD_FOR_INPUT_ADDRESS:
4476 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4478 first_inpaddr_num[rld[i].opnum] = i;
4479 need_change = 1;
4481 break;
4482 case RELOAD_FOR_OUTPUT_ADDRESS:
4483 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4485 first_outpaddr_num[rld[i].opnum] = i;
4486 need_change = 1;
4488 break;
4489 default:
4490 break;
4494 if (need_change)
4496 for (i = 0; i < n_reloads; i++)
4498 int first_num;
4499 enum reload_type type;
4501 switch (rld[i].when_needed)
4503 case RELOAD_FOR_OPADDR_ADDR:
4504 first_num = first_op_addr_num;
4505 type = RELOAD_FOR_OPERAND_ADDRESS;
4506 break;
4507 case RELOAD_FOR_INPADDR_ADDRESS:
4508 first_num = first_inpaddr_num[rld[i].opnum];
4509 type = RELOAD_FOR_INPUT_ADDRESS;
4510 break;
4511 case RELOAD_FOR_OUTADDR_ADDRESS:
4512 first_num = first_outpaddr_num[rld[i].opnum];
4513 type = RELOAD_FOR_OUTPUT_ADDRESS;
4514 break;
4515 default:
4516 continue;
4518 if (first_num < 0)
4519 continue;
4520 else if (i > first_num)
4521 rld[i].when_needed = type;
4522 else
4524 /* Check if the only TYPE reload that uses reload I is
4525 reload FIRST_NUM. */
4526 for (j = n_reloads - 1; j > first_num; j--)
4528 if (rld[j].when_needed == type
4529 && (rld[i].secondary_p
4530 ? rld[j].secondary_in_reload == i
4531 : reg_mentioned_p (rld[i].in, rld[j].in)))
4533 rld[i].when_needed = type;
4534 break;
4542 /* See if we have any reloads that are now allowed to be merged
4543 because we've changed when the reload is needed to
4544 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4545 check for the most common cases. */
4547 for (i = 0; i < n_reloads; i++)
4548 if (rld[i].in != 0 && rld[i].out == 0
4549 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4550 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4551 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4552 for (j = 0; j < n_reloads; j++)
4553 if (i != j && rld[j].in != 0 && rld[j].out == 0
4554 && rld[j].when_needed == rld[i].when_needed
4555 && MATCHES (rld[i].in, rld[j].in)
4556 && rld[i].rclass == rld[j].rclass
4557 && !rld[i].nocombine && !rld[j].nocombine
4558 && rld[i].reg_rtx == rld[j].reg_rtx)
4560 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4561 transfer_replacements (i, j);
4562 rld[j].in = 0;
4565 #ifdef HAVE_cc0
4566 /* If we made any reloads for addresses, see if they violate a
4567 "no input reloads" requirement for this insn. But loads that we
4568 do after the insn (such as for output addresses) are fine. */
4569 if (no_input_reloads)
4570 for (i = 0; i < n_reloads; i++)
4571 gcc_assert (rld[i].in == 0
4572 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4573 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4574 #endif
4576 /* Compute reload_mode and reload_nregs. */
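/* For example (a sketch; the exact numbers are target-dependent):
   a reload with inmode SImode and outmode DImode gets
   rld[i].mode = DImode, the wider of the two, and on a 32-bit
   target where DImode occupies two general registers
   ira_reg_class_max_nregs yields rld[i].nregs == 2.  */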
4577 for (i = 0; i < n_reloads; i++)
4579 rld[i].mode
4580 = (rld[i].inmode == VOIDmode
4581 || (GET_MODE_SIZE (rld[i].outmode)
4582 > GET_MODE_SIZE (rld[i].inmode)))
4583 ? rld[i].outmode : rld[i].inmode;
4585 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4588 /* Special case a simple move with an input reload and a
4589 destination of a hard reg, if the hard reg is ok, use it. */
4590 for (i = 0; i < n_reloads; i++)
4591 if (rld[i].when_needed == RELOAD_FOR_INPUT
4592 && GET_CODE (PATTERN (insn)) == SET
4593 && REG_P (SET_DEST (PATTERN (insn)))
4594 && (SET_SRC (PATTERN (insn)) == rld[i].in
4595 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4596 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4598 rtx dest = SET_DEST (PATTERN (insn));
4599 unsigned int regno = REGNO (dest);
4601 if (regno < FIRST_PSEUDO_REGISTER
4602 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4603 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4605 int nr = hard_regno_nregs[regno][rld[i].mode];
4606 int ok = 1, nri;
4608 for (nri = 1; nri < nr; nri ++)
4609 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4611 ok = 0;
4612 break;
4615 if (ok)
4616 rld[i].reg_rtx = dest;
4620 return retval;
4623 /* Return true if alternative number ALTNUM in constraint-string
4624 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4625 MEM gives the reference if it didn't need any reloads, otherwise it
4626 is null. */
4628 static bool
4629 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4630 const char *constraint, int altnum)
4632 int c;
4634 /* Skip alternatives before the one requested. */
4635 while (altnum > 0)
4637 while (*constraint++ != ',')
4639 altnum--;
4641 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4642 If one of them is present, this alternative accepts the result of
4643 passing a constant-pool reference through find_reloads_toplev.
4645 The same is true of extra memory constraints if the address
4646 was reloaded into a register. However, the target may elect
4647 to disallow the original constant address, forcing it to be
4648 reloaded into a register instead. */
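/* For example (the common case only): with the constraint string
   "r,o" and ALTNUM == 1, the loop below examines just the "o" part;
   since that is a memory constraint, the function returns true,
   i.e. the constant-pool reference produced by find_reloads_toplev
   is acceptable to this alternative without any further reload.  */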
4649 for (; (c = *constraint) && c != ',' && c != '#';
4650 constraint += CONSTRAINT_LEN (c, constraint))
4652 enum constraint_num cn = lookup_constraint (constraint);
4653 if (insn_extra_memory_constraint (cn)
4654 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4655 return true;
4657 return false;
4660 /* Scan X for memory references and scan the addresses for reloading.
4661 Also checks for references to "constant" regs that we want to eliminate
4662 and replaces them with the values they stand for.
4663 We may alter X destructively if it contains a reference to such.
4664 If X is just a constant reg, we return the equivalent value
4665 instead of X.
4667 IND_LEVELS says how many levels of indirect addressing this machine
4668 supports.
4670 OPNUM and TYPE identify the purpose of the reload.
4672 IS_SET_DEST is true if X is the destination of a SET, which is not
4673 appropriate to be replaced by a constant.
4675 INSN, if nonzero, is the insn in which we do the reload. It is used
4676 to determine if we may generate output reloads, and where to put USEs
4677 for pseudos that we have to replace with stack slots.
4679 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4680 result of find_reloads_address. */
4682 static rtx
4683 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4684 int ind_levels, int is_set_dest, rtx_insn *insn,
4685 int *address_reloaded)
4687 RTX_CODE code = GET_CODE (x);
4689 const char *fmt = GET_RTX_FORMAT (code);
4690 int i;
4691 int copied;
4693 if (code == REG)
4695 /* This code is duplicated for speed in find_reloads. */
4696 int regno = REGNO (x);
4697 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4698 x = reg_equiv_constant (regno);
4699 #if 0
4700 /* This creates (subreg (mem...)) which would cause an unnecessary
4701 reload of the mem. */
4702 else if (reg_equiv_mem (regno) != 0)
4703 x = reg_equiv_mem (regno);
4704 #endif
4705 else if (reg_equiv_memory_loc (regno)
4706 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4708 rtx mem = make_memloc (x, regno);
4709 if (reg_equiv_address (regno)
4710 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4712 /* If this is not a toplevel operand, find_reloads doesn't see
4713 this substitution. We have to emit a USE of the pseudo so
4714 that delete_output_reload can see it. */
4715 if (replace_reloads && recog_data.operand[opnum] != x)
4716 /* We mark the USE with QImode so that we recognize it
4717 as one that can be safely deleted at the end of
4718 reload. */
4719 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4720 QImode);
4721 x = mem;
4722 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4723 opnum, type, ind_levels, insn);
4724 if (!rtx_equal_p (x, mem))
4725 push_reg_equiv_alt_mem (regno, x);
4726 if (address_reloaded)
4727 *address_reloaded = i;
4730 return x;
4732 if (code == MEM)
4734 rtx tem = x;
4736 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4737 opnum, type, ind_levels, insn);
4738 if (address_reloaded)
4739 *address_reloaded = i;
4741 return tem;
4744 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4746 /* Check for SUBREG containing a REG that's equivalent to a
4747 constant. If the constant has a known value, truncate it
4748 right now. Similarly if we are extracting a single-word of a
4749 multi-word constant. If the constant is symbolic, allow it
4750 to be substituted normally. push_reload will strip the
4751 subreg later. The constant must not be VOIDmode, because we
4752 will lose the mode of the register (this should never happen
4753 because one of the cases above should handle it). */
4755 int regno = REGNO (SUBREG_REG (x));
4756 rtx tem;
4758 if (regno >= FIRST_PSEUDO_REGISTER
4759 && reg_renumber[regno] < 0
4760 && reg_equiv_constant (regno) != 0)
4762 tem =
4763 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4764 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4765 gcc_assert (tem);
4766 if (CONSTANT_P (tem)
4767 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4769 tem = force_const_mem (GET_MODE (x), tem);
4770 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4771 &XEXP (tem, 0), opnum, type,
4772 ind_levels, insn);
4773 if (address_reloaded)
4774 *address_reloaded = i;
4776 return tem;
4779 /* If the subreg contains a reg that will be converted to a mem,
4780 attempt to convert the whole subreg to a (narrower or wider)
4781 memory reference instead. If this succeeds, we're done --
4782 otherwise fall through to check whether the inner reg still
4783 needs address reloads anyway. */
4785 if (regno >= FIRST_PSEUDO_REGISTER
4786 && reg_equiv_memory_loc (regno) != 0)
4788 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4789 insn, address_reloaded);
4790 if (tem)
4791 return tem;
4795 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4797 if (fmt[i] == 'e')
4799 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4800 ind_levels, is_set_dest, insn,
4801 address_reloaded);
4802 /* If we have replaced a reg with its equivalent memory loc -
4803 that can still be handled here e.g. if it's in a paradoxical
4804 subreg - we must make the change in a copy, rather than using
4805 a destructive change. This way, find_reloads can still elect
4806 not to do the change. */
4807 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4809 x = shallow_copy_rtx (x);
4810 copied = 1;
4812 XEXP (x, i) = new_part;
4815 return x;
4818 /* Return a mem ref for the memory equivalent of reg REGNO.
4819 This mem ref is not shared with anything. */
4821 static rtx
4822 make_memloc (rtx ad, int regno)
4824 /* We must rerun eliminate_regs, in case the elimination
4825 offsets have changed. */
4826 rtx tem
4827 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4830 /* If TEM might contain a pseudo, we must copy it to avoid
4831 modifying it when we do the substitution for the reload. */
4832 if (rtx_varies_p (tem, 0))
4833 tem = copy_rtx (tem);
4835 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4836 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4838 /* Copy the result if it's still the same as the equivalence, to avoid
4839 modifying it when we do the substitution for the reload. */
4840 if (tem == reg_equiv_memory_loc (regno))
4841 tem = copy_rtx (tem);
4842 return tem;
4845 /* Returns true if AD could be turned into a valid memory reference
4846 to mode MODE in address space AS by reloading the part pointed to
4847 by PART into a register. */
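/* Roughly speaking, the probe below works by temporarily splicing a
   fresh, not-yet-allocated REG into the location pointed to by PART,
   asking memory_address_addr_space_p whether the resulting address
   would be legitimate, and then restoring the original contents of
   *PART so the caller's rtl is left unchanged.  */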
4849 static int
4850 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4851 addr_space_t as, rtx *part)
4853 int retv;
4854 rtx tem = *part;
4855 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4857 *part = reg;
4858 retv = memory_address_addr_space_p (mode, ad, as);
4859 *part = tem;
4861 return retv;
4864 /* Record all reloads needed for handling memory address AD
4865 which appears in *LOC in a memory reference to mode MODE
4866 which itself is found in location *MEMREFLOC.
4867 Note that we take shortcuts assuming that no multi-reg machine mode
4868 occurs as part of an address.
4870 OPNUM and TYPE specify the purpose of this reload.
4872 IND_LEVELS says how many levels of indirect addressing this machine
4873 supports.
4875 INSN, if nonzero, is the insn in which we do the reload. It is used
4876 to determine if we may generate output reloads, and where to put USEs
4877 for pseudos that we have to replace with stack slots.
4879 Value is one if this address is reloaded or replaced as a whole; it is
4880 zero if the top level of this address was not reloaded or replaced, and
4881 it is -1 if it may or may not have been reloaded or replaced.
4883 Note that there is no verification that the address will be valid after
4884 this routine does its work. Instead, we rely on the fact that the address
4885 was valid when reload started. So we need only undo things that reload
4886 could have broken. These are wrong register types, pseudos not allocated
4887 to a hard register, and frame pointer elimination. */
4889 static int
4890 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4891 rtx *loc, int opnum, enum reload_type type,
4892 int ind_levels, rtx_insn *insn)
4894 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4895 : ADDR_SPACE_GENERIC;
4896 int regno;
4897 int removed_and = 0;
4898 int op_index;
4899 rtx tem;
4901 /* If the address is a register, see if it is a legitimate address and
4902 reload if not. We first handle the cases where we need not reload
4903 or where we must reload in a non-standard way. */
4905 if (REG_P (ad))
4907 regno = REGNO (ad);
4909 if (reg_equiv_constant (regno) != 0)
4911 find_reloads_address_part (reg_equiv_constant (regno), loc,
4912 base_reg_class (mode, as, MEM, SCRATCH),
4913 GET_MODE (ad), opnum, type, ind_levels);
4914 return 1;
4917 tem = reg_equiv_memory_loc (regno);
4918 if (tem != 0)
4920 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4922 tem = make_memloc (ad, regno);
4923 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4924 XEXP (tem, 0),
4925 MEM_ADDR_SPACE (tem)))
4927 rtx orig = tem;
4929 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4930 &XEXP (tem, 0), opnum,
4931 ADDR_TYPE (type), ind_levels, insn);
4932 if (!rtx_equal_p (tem, orig))
4933 push_reg_equiv_alt_mem (regno, tem);
4935 /* We can avoid a reload if the register's equivalent memory
4936 expression is valid as an indirect memory address.
4937 But not all addresses are valid in a mem used as an indirect
4938 address: only reg or reg+constant. */
4940 if (ind_levels > 0
4941 && strict_memory_address_addr_space_p (mode, tem, as)
4942 && (REG_P (XEXP (tem, 0))
4943 || (GET_CODE (XEXP (tem, 0)) == PLUS
4944 && REG_P (XEXP (XEXP (tem, 0), 0))
4945 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4947 /* If TEM is not the same as what we'll be replacing the
4948 pseudo with after reload, put a USE in front of INSN
4949 in the final reload pass. */
4950 if (replace_reloads
4951 && num_not_at_initial_offset
4952 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4954 *loc = tem;
4955 /* We mark the USE with QImode so that we
4956 recognize it as one that can be safely
4957 deleted at the end of reload. */
4958 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4959 insn), QImode);
4961 /* This doesn't really count as replacing the address
4962 as a whole, since it is still a memory access. */
4964 return 0;
4966 ad = tem;
4970 /* The only remaining case where we can avoid a reload is if this is a
4971 hard register that is valid as a base register and which is not the
4972 subject of a CLOBBER in this insn. */
4974 else if (regno < FIRST_PSEUDO_REGISTER
4975 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4976 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4977 return 0;
4979 /* If we do not have one of the cases above, we must do the reload. */
4980 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4981 base_reg_class (mode, as, MEM, SCRATCH),
4982 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4983 return 1;
4986 if (strict_memory_address_addr_space_p (mode, ad, as))
4988 /* The address appears valid, so reloads are not needed.
4989 But the address may contain an eliminable register.
4990 This can happen because a machine with indirect addressing
4991 may consider a pseudo register by itself a valid address even when
4992 it has failed to get a hard reg.
4993 So do a tree-walk to find and eliminate all such regs. */
4995 /* But first quickly dispose of a common case. */
4996 if (GET_CODE (ad) == PLUS
4997 && CONST_INT_P (XEXP (ad, 1))
4998 && REG_P (XEXP (ad, 0))
4999 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5000 return 0;
5002 subst_reg_equivs_changed = 0;
5003 *loc = subst_reg_equivs (ad, insn);
5005 if (! subst_reg_equivs_changed)
5006 return 0;
5008 /* Check result for validity after substitution. */
5009 if (strict_memory_address_addr_space_p (mode, ad, as))
5010 return 0;
5013 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5016 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5018 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5019 ind_levels, win);
5021 break;
5022 win:
5023 *memrefloc = copy_rtx (*memrefloc);
5024 XEXP (*memrefloc, 0) = ad;
5025 move_replacements (&ad, &XEXP (*memrefloc, 0));
5026 return -1;
5028 while (0);
5029 #endif
5031 /* The address is not valid. We have to figure out why. First see if
5032 we have an outer AND and remove it if so. Then analyze what's inside. */
5034 if (GET_CODE (ad) == AND)
5036 removed_and = 1;
5037 loc = &XEXP (ad, 0);
5038 ad = *loc;
5041 /* One possibility for why the address is invalid is that it is itself
5042 a MEM. This can happen when the frame pointer is being eliminated, a
5043 pseudo is not allocated to a hard register, and the offset between the
5044 frame and stack pointers is not its initial value. In that case the
5045 pseudo will have been replaced by a MEM referring to the
5046 stack pointer. */
5047 if (MEM_P (ad))
5049 /* First ensure that the address in this MEM is valid. Then, unless
5050 indirect addresses are valid, reload the MEM into a register. */
5051 tem = ad;
5052 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5053 opnum, ADDR_TYPE (type),
5054 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5056 /* If tem was changed, then we must create a new memory reference to
5057 hold it and store it back into memrefloc. */
5058 if (tem != ad && memrefloc)
5060 *memrefloc = copy_rtx (*memrefloc);
5061 copy_replacements (tem, XEXP (*memrefloc, 0));
5062 loc = &XEXP (*memrefloc, 0);
5063 if (removed_and)
5064 loc = &XEXP (*loc, 0);
5067 /* Check the same cases as for indirect addresses above, except
5068 that we can allow pseudos and a MEM since they should have been
5069 taken care of above. */
5071 if (ind_levels == 0
5072 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5073 || MEM_P (XEXP (tem, 0))
5074 || ! (REG_P (XEXP (tem, 0))
5075 || (GET_CODE (XEXP (tem, 0)) == PLUS
5076 && REG_P (XEXP (XEXP (tem, 0), 0))
5077 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5079 /* Must use TEM here, not AD, since it is the one that will
5080 have any subexpressions reloaded, if needed. */
5081 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5082 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5083 VOIDmode, 0,
5084 0, opnum, type);
5085 return ! removed_and;
5087 else
5088 return 0;
5091 /* If we have address of a stack slot but it's not valid because the
5092 displacement is too large, compute the sum in a register.
5093 Handle all base registers here, not just fp/ap/sp, because on some
5094 targets (namely SH) we can also get too large displacements from
5095 big-endian corrections. */
5096 else if (GET_CODE (ad) == PLUS
5097 && REG_P (XEXP (ad, 0))
5098 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5099 && CONST_INT_P (XEXP (ad, 1))
5100 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5101 CONST_INT)
5102 /* Similarly, if we were to reload the base register and the
5103 mem+offset address is still invalid, then we want to reload
5104 the whole address, not just the base register. */
5105 || ! maybe_memory_address_addr_space_p
5106 (mode, ad, as, &(XEXP (ad, 0)))))
5109 /* Unshare the MEM rtx so we can safely alter it. */
5110 if (memrefloc)
5112 *memrefloc = copy_rtx (*memrefloc);
5113 loc = &XEXP (*memrefloc, 0);
5114 if (removed_and)
5115 loc = &XEXP (*loc, 0);
5118 if (double_reg_address_ok
5119 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5120 PLUS, CONST_INT))
5122 /* Unshare the sum as well. */
5123 *loc = ad = copy_rtx (ad);
5125 /* Reload the displacement into an index reg.
5126 We assume the frame pointer or arg pointer is a base reg. */
5127 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5128 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5129 type, ind_levels);
5130 return 0;
5132 else
5134 /* If the sum of two regs is not necessarily valid,
5135 reload the sum into a base reg.
5136 That will at least work. */
5137 find_reloads_address_part (ad, loc,
5138 base_reg_class (mode, as, MEM, SCRATCH),
5139 GET_MODE (ad), opnum, type, ind_levels);
5141 return ! removed_and;
5144 /* If we have an indexed stack slot, there are three possible reasons why
5145 it might be invalid: The index might need to be reloaded, the address
5146 might have been made by frame pointer elimination and hence have a
5147 constant out of range, or both reasons might apply.
5149 We can easily check for an index needing reload, but even if that is the
5150 case, we might also have an invalid constant. To avoid making the
5151 conservative assumption and requiring two reloads, we see if this address
5152 is valid when not interpreted strictly. If it is, the only problem is
5153 that the index needs a reload and find_reloads_address_1 will take care
5154 of it.
5156 Handle all base registers here, not just fp/ap/sp, because on some
5157 targets (namely SPARC) we can also get invalid addresses from preventive
5158 subreg big-endian corrections made by find_reloads_toplev. We
5159 can also get expressions involving LO_SUM (rather than PLUS) from
5160 find_reloads_subreg_address.
5162 If we decide to do something, it must be that `double_reg_address_ok'
5163 is true. We generate a reload of the base register + constant and
5164 rework the sum so that the reload register will be added to the index.
5165 This is safe because we know the address isn't shared.
5167 We check for the base register as both the first and second operand of
5168 the innermost PLUS and/or LO_SUM. */
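/* Illustrative example (editorial note, not in the original source): given
   ad = (plus (plus (reg fp) (reg index)) (const_int 4096)) with a
   displacement that is out of range, the loop below rewrites it as
   (plus (plus (reg fp) (const_int 4096)) (reg index)) and reloads the
   inner sum into a base register, leaving a valid base+index address.  */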
5170 for (op_index = 0; op_index < 2; ++op_index)
5172 rtx operand, addend;
5173 enum rtx_code inner_code;
5175 if (GET_CODE (ad) != PLUS)
5176 continue;
5178 inner_code = GET_CODE (XEXP (ad, 0));
5179 if (!(GET_CODE (ad) == PLUS
5180 && CONST_INT_P (XEXP (ad, 1))
5181 && (inner_code == PLUS || inner_code == LO_SUM)))
5182 continue;
5184 operand = XEXP (XEXP (ad, 0), op_index);
5185 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5186 continue;
5188 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5190 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5191 GET_CODE (addend))
5192 || operand == frame_pointer_rtx
5193 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5194 || operand == hard_frame_pointer_rtx
5195 #endif
5196 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5197 || operand == arg_pointer_rtx
5198 #endif
5199 || operand == stack_pointer_rtx)
5200 && ! maybe_memory_address_addr_space_p
5201 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5203 rtx offset_reg;
5204 enum reg_class cls;
5206 offset_reg = plus_constant (GET_MODE (ad), operand,
5207 INTVAL (XEXP (ad, 1)));
5209 /* Form the adjusted address. */
5210 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5211 ad = gen_rtx_PLUS (GET_MODE (ad),
5212 op_index == 0 ? offset_reg : addend,
5213 op_index == 0 ? addend : offset_reg);
5214 else
5215 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5216 op_index == 0 ? offset_reg : addend,
5217 op_index == 0 ? addend : offset_reg);
5218 *loc = ad;
5220 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5221 find_reloads_address_part (XEXP (ad, op_index),
5222 &XEXP (ad, op_index), cls,
5223 GET_MODE (ad), opnum, type, ind_levels);
5224 find_reloads_address_1 (mode, as,
5225 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5226 GET_CODE (XEXP (ad, op_index)),
5227 &XEXP (ad, 1 - op_index), opnum,
5228 type, 0, insn);
5230 return 0;
5234 /* See if address becomes valid when an eliminable register
5235 in a sum is replaced. */
5237 tem = ad;
5238 if (GET_CODE (ad) == PLUS)
5239 tem = subst_indexed_address (ad);
5240 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5242 /* Ok, we win that way. Replace any additional eliminable
5243 registers. */
5245 subst_reg_equivs_changed = 0;
5246 tem = subst_reg_equivs (tem, insn);
5248 /* Make sure that didn't make the address invalid again. */
5250 if (! subst_reg_equivs_changed
5251 || strict_memory_address_addr_space_p (mode, tem, as))
5253 *loc = tem;
5254 return 0;
5258 /* If constants aren't valid addresses, reload the constant address
5259 into a register. */
5260 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5262 machine_mode address_mode = GET_MODE (ad);
5263 if (address_mode == VOIDmode)
5264 address_mode = targetm.addr_space.address_mode (as);
5266 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5267 Unshare it so we can safely alter it. */
5268 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5269 && CONSTANT_POOL_ADDRESS_P (ad))
5271 *memrefloc = copy_rtx (*memrefloc);
5272 loc = &XEXP (*memrefloc, 0);
5273 if (removed_and)
5274 loc = &XEXP (*loc, 0);
5277 find_reloads_address_part (ad, loc,
5278 base_reg_class (mode, as, MEM, SCRATCH),
5279 address_mode, opnum, type, ind_levels);
5280 return ! removed_and;
5283 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5284 opnum, type, ind_levels, insn);
5287 /* Find all pseudo regs appearing in AD
5288 that are eliminable in favor of equivalent values
5289 and do not have hard regs; replace them by their equivalents.
5290 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5291 front of it for pseudos that we have to replace with stack slots. */
5293 static rtx
5294 subst_reg_equivs (rtx ad, rtx_insn *insn)
5296 RTX_CODE code = GET_CODE (ad);
5297 int i;
5298 const char *fmt;
5300 switch (code)
5302 case HIGH:
5303 case CONST:
5304 CASE_CONST_ANY:
5305 case SYMBOL_REF:
5306 case LABEL_REF:
5307 case PC:
5308 case CC0:
5309 return ad;
5311 case REG:
5313 int regno = REGNO (ad);
5315 if (reg_equiv_constant (regno) != 0)
5317 subst_reg_equivs_changed = 1;
5318 return reg_equiv_constant (regno);
5320 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5322 rtx mem = make_memloc (ad, regno);
5323 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5325 subst_reg_equivs_changed = 1;
5326 /* We mark the USE with QImode so that we recognize it
5327 as one that can be safely deleted at the end of
5328 reload. */
5329 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5330 QImode);
5331 return mem;
5335 return ad;
5337 case PLUS:
5338 /* Quickly dispose of a common case. */
5339 if (XEXP (ad, 0) == frame_pointer_rtx
5340 && CONST_INT_P (XEXP (ad, 1)))
5341 return ad;
5342 break;
5344 default:
5345 break;
5348 fmt = GET_RTX_FORMAT (code);
5349 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5350 if (fmt[i] == 'e')
5351 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5352 return ad;
5355 /* Compute the sum of X and Y, making canonicalizations assumed in an
5356 address, namely: sum constant integers, surround the sum of two
5357 constants with a CONST, put the constant as the second operand, and
5358 group the constant on the outermost sum.
5360 This routine assumes both inputs are already in canonical form. */
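/* Illustrative examples (editorial note, not in the original source):
   form_sum (Pmode, (plus (reg 100) (const_int 4)), (const_int 8))
   yields (plus (reg 100) (const_int 12)), while summing a SYMBOL_REF
   with a CONST_INT yields the canonical
   (const (plus (symbol_ref "x") (const_int 8))).  */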
5362 rtx
5363 form_sum (machine_mode mode, rtx x, rtx y)
5365 rtx tem;
5367 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5368 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5370 if (CONST_INT_P (x))
5371 return plus_constant (mode, y, INTVAL (x));
5372 else if (CONST_INT_P (y))
5373 return plus_constant (mode, x, INTVAL (y));
5374 else if (CONSTANT_P (x))
5375 tem = x, x = y, y = tem;
5377 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5378 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5380 /* Note that if the operands of Y are specified in the opposite
5381 order in the recursive calls below, infinite recursion will occur. */
5382 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5383 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5385 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5386 constant will have been placed second. */
5387 if (CONSTANT_P (x) && CONSTANT_P (y))
5389 if (GET_CODE (x) == CONST)
5390 x = XEXP (x, 0);
5391 if (GET_CODE (y) == CONST)
5392 y = XEXP (y, 0);
5394 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5397 return gen_rtx_PLUS (mode, x, y);
5400 /* If ADDR is a sum containing a pseudo register that should be
5401 replaced with a constant (from reg_equiv_constant),
5402 return the result of doing so, and also apply the associative
5403 law so that the result is more likely to be a valid address.
5404 (But it is not guaranteed to be one.)
5406 Note that at most one register is replaced, even if more are
5407 replaceable. Also, we try to put the result into a canonical form
5408 so it is more likely to be a valid address.
5410 In all other cases, return ADDR. */
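/* Illustrative example (editorial note, not in the original source): for
   addr = (plus (reg 100) (const_int 4)) where pseudo 100 got no hard
   register and reg_equiv_constant (100) is (symbol_ref "x"), the result
   is (const (plus (symbol_ref "x") (const_int 4))).  */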
5412 static rtx
5413 subst_indexed_address (rtx addr)
5415 rtx op0 = 0, op1 = 0, op2 = 0;
5416 rtx tem;
5417 int regno;
5419 if (GET_CODE (addr) == PLUS)
5421 /* Try to find a register to replace. */
5422 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5423 if (REG_P (op0)
5424 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5425 && reg_renumber[regno] < 0
5426 && reg_equiv_constant (regno) != 0)
5427 op0 = reg_equiv_constant (regno);
5428 else if (REG_P (op1)
5429 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5430 && reg_renumber[regno] < 0
5431 && reg_equiv_constant (regno) != 0)
5432 op1 = reg_equiv_constant (regno);
5433 else if (GET_CODE (op0) == PLUS
5434 && (tem = subst_indexed_address (op0)) != op0)
5435 op0 = tem;
5436 else if (GET_CODE (op1) == PLUS
5437 && (tem = subst_indexed_address (op1)) != op1)
5438 op1 = tem;
5439 else
5440 return addr;
5442 /* Pick out up to three things to add. */
5443 if (GET_CODE (op1) == PLUS)
5444 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5445 else if (GET_CODE (op0) == PLUS)
5446 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5448 /* Compute the sum. */
5449 if (op2 != 0)
5450 op1 = form_sum (GET_MODE (addr), op1, op2);
5451 if (op1 != 0)
5452 op0 = form_sum (GET_MODE (addr), op0, op1);
5454 return op0;
5456 return addr;
5459 /* Update the REG_INC notes for an insn. It updates all REG_INC
5460 notes for the instruction which refer to REGNO so that they refer
5461 to the reload number instead.
5463 INSN is the insn for which any REG_INC notes need updating.
5465 REGNO is the register number which has been reloaded.
5467 RELOADNUM is the reload number. */
5469 static void
5470 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5471 int reloadnum ATTRIBUTE_UNUSED)
5473 #ifdef AUTO_INC_DEC
5474 rtx link;
5476 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5477 if (REG_NOTE_KIND (link) == REG_INC
5478 && (int) REGNO (XEXP (link, 0)) == regno)
5479 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5480 #endif
5483 /* Record the pseudo registers we must reload into hard registers in a
5484 subexpression of a would-be memory address, X referring to a value
5485 in mode MODE. (This function is not called if the address we find
5486 is strictly valid.)
5488 CONTEXT = 1 means we are considering regs as index regs,
5489 = 0 means we are considering them as base regs.
5490 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5491 or an autoinc code.
5492 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5493 is the code of the index part of the address. Otherwise, pass SCRATCH
5494 for this argument.
5495 OPNUM and TYPE specify the purpose of any reloads made.
5497 IND_LEVELS says how many levels of indirect addressing are
5498 supported at this point in the address.
5500 INSN, if nonzero, is the insn in which we do the reload. It is used
5501 to determine if we may generate output reloads.
5503 We return nonzero if X, as a whole, is reloaded or replaced. */
5505 /* Note that we take shortcuts assuming that no multi-reg machine mode
5506 occurs as part of an address.
5507 Also, this is not fully machine-customizable; it works for machines
5508 such as VAXen and 68000's and 32000's, but other possible machines
5509 could have addressing modes that this does not handle right.
5510 If you add push_reload calls here, you need to make sure gen_reload
5511 handles those cases gracefully. */
5513 static int
5514 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5515 rtx x, int context,
5516 enum rtx_code outer_code, enum rtx_code index_code,
5517 rtx *loc, int opnum, enum reload_type type,
5518 int ind_levels, rtx_insn *insn)
5520 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5521 ((CONTEXT) == 0 \
5522 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5523 : REGNO_OK_FOR_INDEX_P (REGNO))
5525 enum reg_class context_reg_class;
5526 RTX_CODE code = GET_CODE (x);
5527 bool reloaded_inner_of_autoinc = false;
5529 if (context == 1)
5530 context_reg_class = INDEX_REG_CLASS;
5531 else
5532 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5534 switch (code)
5536 case PLUS:
5538 rtx orig_op0 = XEXP (x, 0);
5539 rtx orig_op1 = XEXP (x, 1);
5540 RTX_CODE code0 = GET_CODE (orig_op0);
5541 RTX_CODE code1 = GET_CODE (orig_op1);
5542 rtx op0 = orig_op0;
5543 rtx op1 = orig_op1;
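/* Editorial note (comment added, not in the original source): if either
   operand is a SUBREG of a hard register, classify it below by the
   underlying hard register, adjusted by the subreg offset.  */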
5545 if (GET_CODE (op0) == SUBREG)
5547 op0 = SUBREG_REG (op0);
5548 code0 = GET_CODE (op0);
5549 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5550 op0 = gen_rtx_REG (word_mode,
5551 (REGNO (op0) +
5552 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5553 GET_MODE (SUBREG_REG (orig_op0)),
5554 SUBREG_BYTE (orig_op0),
5555 GET_MODE (orig_op0))));
5558 if (GET_CODE (op1) == SUBREG)
5560 op1 = SUBREG_REG (op1);
5561 code1 = GET_CODE (op1);
5562 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5563 /* ??? Why is this given op1's mode, while above for
5564 ??? op0 SUBREGs we use word_mode? */
5565 op1 = gen_rtx_REG (GET_MODE (op1),
5566 (REGNO (op1) +
5567 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5568 GET_MODE (SUBREG_REG (orig_op1)),
5569 SUBREG_BYTE (orig_op1),
5570 GET_MODE (orig_op1))));
5572 /* A PLUS in the index register may be created only as a result of
5573 register rematerialization for an expression like &localvar*4. Reload it.
5574 It may be possible to combine the displacement on the outer level,
5575 but it is probably not worthwhile to do so. */
5576 if (context == 1)
5578 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5579 opnum, ADDR_TYPE (type), ind_levels, insn);
5580 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5581 context_reg_class,
5582 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5583 return 1;
5586 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5587 || code0 == ZERO_EXTEND || code1 == MEM)
5589 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5590 &XEXP (x, 0), opnum, type, ind_levels,
5591 insn);
5592 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5593 &XEXP (x, 1), opnum, type, ind_levels,
5594 insn);
5597 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5598 || code1 == ZERO_EXTEND || code0 == MEM)
5600 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5601 &XEXP (x, 0), opnum, type, ind_levels,
5602 insn);
5603 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5604 &XEXP (x, 1), opnum, type, ind_levels,
5605 insn);
5608 else if (code0 == CONST_INT || code0 == CONST
5609 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5610 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5611 &XEXP (x, 1), opnum, type, ind_levels,
5612 insn);
5614 else if (code1 == CONST_INT || code1 == CONST
5615 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5616 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5617 &XEXP (x, 0), opnum, type, ind_levels,
5618 insn);
5620 else if (code0 == REG && code1 == REG)
5622 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5623 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5624 return 0;
5625 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5626 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5627 return 0;
5628 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5629 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5630 &XEXP (x, 1), opnum, type, ind_levels,
5631 insn);
5632 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5633 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5634 &XEXP (x, 0), opnum, type, ind_levels,
5635 insn);
5636 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5637 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5638 &XEXP (x, 0), opnum, type, ind_levels,
5639 insn);
5640 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5641 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5642 &XEXP (x, 1), opnum, type, ind_levels,
5643 insn);
5644 else
5646 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5647 &XEXP (x, 0), opnum, type, ind_levels,
5648 insn);
5649 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5650 &XEXP (x, 1), opnum, type, ind_levels,
5651 insn);
5655 else if (code0 == REG)
5657 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5658 &XEXP (x, 0), opnum, type, ind_levels,
5659 insn);
5660 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5661 &XEXP (x, 1), opnum, type, ind_levels,
5662 insn);
5665 else if (code1 == REG)
5667 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5668 &XEXP (x, 1), opnum, type, ind_levels,
5669 insn);
5670 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5671 &XEXP (x, 0), opnum, type, ind_levels,
5672 insn);
5676 return 0;
5678 case POST_MODIFY:
5679 case PRE_MODIFY:
5681 rtx op0 = XEXP (x, 0);
5682 rtx op1 = XEXP (x, 1);
5683 enum rtx_code index_code;
5684 int regno;
5685 int reloadnum;
5687 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5688 return 0;
5690 /* Currently, we only support {PRE,POST}_MODIFY constructs
5691 where a base register is {inc,dec}remented by the contents
5692 of another register or by a constant value. Thus, these
5693 operands must match. */
5694 gcc_assert (op0 == XEXP (op1, 0));
5696 /* Require index register (or constant). Let's just handle the
5697 register case in the meantime... If the target allows
5698 auto-modify by a constant then we could try replacing a pseudo
5699 register with its equivalent constant where applicable.
5701 We also handle the case where the register was eliminated
5702 resulting in a PLUS subexpression.
5704 If we later decide to reload the whole PRE_MODIFY or
5705 POST_MODIFY, inc_for_reload might clobber the reload register
5706 before reading the index. The index register might therefore
5707 need to live longer than a TYPE reload normally would, so be
5708 conservative and class it as RELOAD_OTHER. */
5709 if ((REG_P (XEXP (op1, 1))
5710 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5711 || GET_CODE (XEXP (op1, 1)) == PLUS)
5712 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5713 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5714 ind_levels, insn);
5716 gcc_assert (REG_P (XEXP (op1, 0)));
5718 regno = REGNO (XEXP (op1, 0));
5719 index_code = GET_CODE (XEXP (op1, 1));
5721 /* A register that is incremented cannot be constant! */
5722 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5723 || reg_equiv_constant (regno) == 0);
5725 /* Handle a register that is equivalent to a memory location
5726 which cannot be addressed directly. */
5727 if (reg_equiv_memory_loc (regno) != 0
5728 && (reg_equiv_address (regno) != 0
5729 || num_not_at_initial_offset))
5731 rtx tem = make_memloc (XEXP (x, 0), regno);
5733 if (reg_equiv_address (regno)
5734 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5736 rtx orig = tem;
5738 /* First reload the memory location's address.
5739 We can't use ADDR_TYPE (type) here, because we need to
5740 write back the value after reading it, hence we actually
5741 need two registers. */
5742 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5743 &XEXP (tem, 0), opnum,
5744 RELOAD_OTHER,
5745 ind_levels, insn);
5747 if (!rtx_equal_p (tem, orig))
5748 push_reg_equiv_alt_mem (regno, tem);
5750 /* Then reload the memory location into a base
5751 register. */
5752 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5753 &XEXP (op1, 0),
5754 base_reg_class (mode, as,
5755 code, index_code),
5756 GET_MODE (x), GET_MODE (x), 0,
5757 0, opnum, RELOAD_OTHER);
5759 update_auto_inc_notes (this_insn, regno, reloadnum);
5760 return 0;
5764 if (reg_renumber[regno] >= 0)
5765 regno = reg_renumber[regno];
5767 /* We require a base register here... */
5768 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5770 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5771 &XEXP (op1, 0), &XEXP (x, 0),
5772 base_reg_class (mode, as,
5773 code, index_code),
5774 GET_MODE (x), GET_MODE (x), 0, 0,
5775 opnum, RELOAD_OTHER);
5777 update_auto_inc_notes (this_insn, regno, reloadnum);
5778 return 0;
5781 return 0;
5783 case POST_INC:
5784 case POST_DEC:
5785 case PRE_INC:
5786 case PRE_DEC:
5787 if (REG_P (XEXP (x, 0)))
5789 int regno = REGNO (XEXP (x, 0));
5790 int value = 0;
5791 rtx x_orig = x;
5793 /* A register that is incremented cannot be constant! */
5794 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5795 || reg_equiv_constant (regno) == 0);
5797 /* Handle a register that is equivalent to a memory location
5798 which cannot be addressed directly. */
5799 if (reg_equiv_memory_loc (regno) != 0
5800 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5802 rtx tem = make_memloc (XEXP (x, 0), regno);
5803 if (reg_equiv_address (regno)
5804 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5806 rtx orig = tem;
5808 /* First reload the memory location's address.
5809 We can't use ADDR_TYPE (type) here, because we need to
5810 write back the value after reading it, hence we actually
5811 need two registers. */
5812 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5813 &XEXP (tem, 0), opnum, type,
5814 ind_levels, insn);
5815 reloaded_inner_of_autoinc = true;
5816 if (!rtx_equal_p (tem, orig))
5817 push_reg_equiv_alt_mem (regno, tem);
5818 /* Put this inside a new increment-expression. */
5819 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5820 /* Proceed to reload that, as if it contained a register. */
5824 /* If we have a hard register that is ok in this incdec context,
5825 don't make a reload. If the register isn't nice enough for
5826 autoincdec, we can reload it. But if an autoincrement of a
5827 register that we have just verified as acceptable is still not
5828 "valid" in the outer context, it must be that no autoincrement is "valid".
5829 If that is true and something made an autoincrement anyway,
5830 this must be a special context where one is allowed.
5831 (For example, a "push" instruction.)
5832 We can't improve this address, so leave it alone. */
5834 /* Otherwise, reload the autoincrement into a suitable hard reg
5835 and record how much to increment by. */
5837 if (reg_renumber[regno] >= 0)
5838 regno = reg_renumber[regno];
5839 if (regno >= FIRST_PSEUDO_REGISTER
5840 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5841 index_code))
5843 int reloadnum;
5845 /* If we can output the register afterwards, do so; this
5846 saves the extra update.
5847 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5848 CALL_INSN - and it does not set CC0.
5849 But don't do this if we cannot directly address the
5850 memory location, since this will make it harder to
5851 reuse address reloads, and increases register pressure.
5852 Also don't do this if we can probably update x directly. */
5853 rtx equiv = (MEM_P (XEXP (x, 0))
5854 ? XEXP (x, 0)
5855 : reg_equiv_mem (regno));
5856 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5857 if (insn && NONJUMP_INSN_P (insn) && equiv
5858 && memory_operand (equiv, GET_MODE (equiv))
5859 #ifdef HAVE_cc0
5860 && ! sets_cc0_p (PATTERN (insn))
5861 #endif
5862 && ! (icode != CODE_FOR_nothing
5863 && insn_operand_matches (icode, 0, equiv)
5864 && insn_operand_matches (icode, 1, equiv))
5865 /* Using RELOAD_OTHER means we emit this and the reload we
5866 made earlier in the wrong order. */
5867 && !reloaded_inner_of_autoinc)
5869 /* We use the original pseudo for loc, so that
5870 emit_reload_insns() knows which pseudo this
5871 reload refers to and updates the pseudo rtx, not
5872 its equivalent memory location, as well as the
5873 corresponding entry in reg_last_reload_reg. */
5874 loc = &XEXP (x_orig, 0);
5875 x = XEXP (x, 0);
5876 reloadnum
5877 = push_reload (x, x, loc, loc,
5878 context_reg_class,
5879 GET_MODE (x), GET_MODE (x), 0, 0,
5880 opnum, RELOAD_OTHER);
5882 else
5884 reloadnum
5885 = push_reload (x, x, loc, (rtx*) 0,
5886 context_reg_class,
5887 GET_MODE (x), GET_MODE (x), 0, 0,
5888 opnum, type);
5889 rld[reloadnum].inc
5890 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5892 value = 1;
5895 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5896 reloadnum);
5898 return value;
5900 return 0;
5902 case TRUNCATE:
5903 case SIGN_EXTEND:
5904 case ZERO_EXTEND:
5905 /* Look for parts to reload in the inner expression and reload them
5906 too, in addition to this operation. Reloading all inner parts in
5907 addition to this one shouldn't be necessary, but at this point,
5908 we don't know if we can possibly omit any part that *can* be
5909 reloaded. Targets that are better off reloading just either part
5910 (or perhaps even a different part of an outer expression), should
5911 define LEGITIMIZE_RELOAD_ADDRESS. */
5912 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5913 context, code, SCRATCH, &XEXP (x, 0), opnum,
5914 type, ind_levels, insn);
5915 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5916 context_reg_class,
5917 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5918 return 1;
5920 case MEM:
5921 /* This is probably the result of a substitution, by eliminate_regs, of
5922 an equivalent address for a pseudo that was not allocated to a hard
5923 register. Verify that the specified address is valid and reload it
5924 into a register.
5926 Since we know we are going to reload this item, don't decrement for
5927 the indirection level.
5929 Note that this is actually conservative: it would be slightly more
5930 efficient to use the value of SPILL_INDIRECT_LEVELS from
5931 reload1.c here. */
5933 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5934 opnum, ADDR_TYPE (type), ind_levels, insn);
5935 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5936 context_reg_class,
5937 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5938 return 1;
5940 case REG:
5942 int regno = REGNO (x);
5944 if (reg_equiv_constant (regno) != 0)
5946 find_reloads_address_part (reg_equiv_constant (regno), loc,
5947 context_reg_class,
5948 GET_MODE (x), opnum, type, ind_levels);
5949 return 1;
5952 #if 0 /* This might interfere with code in reload1.c that deletes a prior
5953 output-reload feeding this insn. */
5954 if (reg_equiv_mem (regno) != 0)
5956 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5957 context_reg_class,
5958 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5959 return 1;
5961 #endif
5963 if (reg_equiv_memory_loc (regno)
5964 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5966 rtx tem = make_memloc (x, regno);
5967 if (reg_equiv_address (regno) != 0
5968 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5970 x = tem;
5971 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5972 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5973 ind_levels, insn);
5974 if (!rtx_equal_p (x, tem))
5975 push_reg_equiv_alt_mem (regno, x);
5979 if (reg_renumber[regno] >= 0)
5980 regno = reg_renumber[regno];
5982 if (regno >= FIRST_PSEUDO_REGISTER
5983 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5984 index_code))
5986 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5987 context_reg_class,
5988 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5989 return 1;
5992 /* If a register appearing in an address is the subject of a CLOBBER
5993 in this insn, reload it into some other register to be safe.
5994 The CLOBBER is supposed to make the register unavailable
5995 from before this insn to after it. */
5996 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5998 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5999 context_reg_class,
6000 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6001 return 1;
6004 return 0;
6006 case SUBREG:
6007 if (REG_P (SUBREG_REG (x)))
6009 /* If this is a SUBREG of a hard register and the resulting register
6010 is of the wrong class, reload the whole SUBREG. This avoids
6011 needless copies if SUBREG_REG is multi-word. */
6012 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6014 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6016 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6017 index_code))
6019 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6020 context_reg_class,
6021 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6022 return 1;
6025 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6026 is larger than the class size, then reload the whole SUBREG. */
6027 else
6029 enum reg_class rclass = context_reg_class;
6030 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6031 > reg_class_size[(int) rclass])
6033 /* If the inner register will be replaced by a memory
6034 reference, we can do this only if we can replace the
6035 whole subreg by a (narrower) memory reference. If
6036 this is not possible, fall through and reload just
6037 the inner register (including address reloads). */
6038 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6040 rtx tem = find_reloads_subreg_address (x, opnum,
6041 ADDR_TYPE (type),
6042 ind_levels, insn,
6043 NULL);
6044 if (tem)
6046 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6047 GET_MODE (tem), VOIDmode, 0, 0,
6048 opnum, type);
6049 return 1;
6052 else
6054 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6055 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6056 return 1;
6061 break;
6063 default:
6064 break;
6068 const char *fmt = GET_RTX_FORMAT (code);
6069 int i;
6071 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6073 if (fmt[i] == 'e')
6074 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6075 we get here. */
6076 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6077 code, SCRATCH, &XEXP (x, i),
6078 opnum, type, ind_levels, insn);
6082 #undef REG_OK_FOR_CONTEXT
6083 return 0;
6086 /* X, which is found at *LOC, is a part of an address that needs to be
6087 reloaded into a register of class RCLASS. If X is a constant, or if
6088 X is a PLUS that contains a constant, check that the constant is a
6089 legitimate operand and that we are supposed to be able to load
6090 it into the register.
6092 If not, force the constant into memory and reload the MEM instead.
6094 MODE is the mode to use, in case X is an integer constant.
6096 OPNUM and TYPE describe the purpose of any reloads made.
6098 IND_LEVELS says how many levels of indirect addressing this machine
6099 supports. */
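/* Illustrative example (editorial note, not in the original source): if X
   is a constant that the target cannot load directly - say a CONST_DOUBLE
   that is not a legitimate immediate - it is first forced into the
   constant pool and the resulting MEM, rather than the bare constant, is
   reloaded into RCLASS.  */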
6101 static void
6102 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6103 machine_mode mode, int opnum,
6104 enum reload_type type, int ind_levels)
6106 if (CONSTANT_P (x)
6107 && (!targetm.legitimate_constant_p (mode, x)
6108 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6110 x = force_const_mem (mode, x);
6111 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6112 opnum, type, ind_levels, 0);
6115 else if (GET_CODE (x) == PLUS
6116 && CONSTANT_P (XEXP (x, 1))
6117 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6118 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6119 == NO_REGS))
6121 rtx tem;
6123 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6124 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6125 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6126 opnum, type, ind_levels, 0);
6129 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6130 mode, VOIDmode, 0, 0, opnum, type);
6133 /* X, a subreg of a pseudo, is a part of an address that needs to be
6134 reloaded, and the pseudo is equivalent to a memory location.
6136 Attempt to replace the whole subreg by a (possibly narrower or wider)
6137 memory reference. If this is possible, return this new memory
6138 reference, and push all required address reloads. Otherwise,
6139 return NULL.
6141 OPNUM and TYPE identify the purpose of the reload.
6143 IND_LEVELS says how many levels of indirect addressing are
6144 supported at this point in the address.
6146 INSN, if nonzero, is the insn in which we do the reload. It is used
6147 to determine where to put USEs for pseudos that we have to replace with
6148 stack slots. */
6150 static rtx
6151 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6152 int ind_levels, rtx_insn *insn,
6153 int *address_reloaded)
6155 machine_mode outer_mode = GET_MODE (x);
6156 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6157 int regno = REGNO (SUBREG_REG (x));
6158 int reloaded = 0;
6159 rtx tem, orig;
6160 int offset;
6162 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6164 /* We cannot replace the subreg with a modified memory reference if:
6166 - we have a paradoxical subreg that implicitly acts as a zero or
6167 sign extension operation due to LOAD_EXTEND_OP;
6169 - we have a subreg that is implicitly supposed to act on the full
6170 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6172 - the address of the equivalent memory location is mode-dependent; or
6174 - we have a paradoxical subreg and the resulting memory is not
6175 sufficiently aligned to allow access in the wider mode.
6177 In addition, we choose not to perform the replacement for *any*
6178 paradoxical subreg, even if it were possible in principle. This
6179 is to avoid generating wider memory references than necessary.
6181 This corresponds to how previous versions of reload used to handle
6182 paradoxical subregs where no address reload was required. */
6184 if (paradoxical_subreg_p (x))
6185 return NULL;
6187 #ifdef WORD_REGISTER_OPERATIONS
6188 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6189 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6190 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6191 return NULL;
6192 #endif
6194 /* Since we don't attempt to handle paradoxical subregs, we can just
6195 call into simplify_subreg, which will handle all remaining checks
6196 for us. */
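/* Illustrative example (editorial note, not in the original source): for
   (subreg:SI (reg:DI 100) 4) where pseudo 100 lives in memory,
   simplify_subreg folds the subreg into the equivalent MEM, yielding an
   SImode memory reference 4 bytes into the DImode slot; any address
   reloads that new MEM needs are then pushed below.  */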
6197 orig = make_memloc (SUBREG_REG (x), regno);
6198 offset = SUBREG_BYTE (x);
6199 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6200 if (!tem || !MEM_P (tem))
6201 return NULL;
6203 /* Now push all required address reloads, if any. */
6204 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6205 XEXP (tem, 0), &XEXP (tem, 0),
6206 opnum, type, ind_levels, insn);
6207 /* ??? Do we need to handle nonzero offsets somehow? */
6208 if (!offset && !rtx_equal_p (tem, orig))
6209 push_reg_equiv_alt_mem (regno, tem);
6211 /* For some processors an address may be valid in the original mode but
6212 not in a smaller mode. For example, ARM accepts a scaled index register
6213 in SImode but not in HImode. Note that this is only a problem if the
6214 address in reg_equiv_mem is already invalid in the new mode; other
6215 cases would be fixed by find_reloads_address as usual.
6217 ??? We attempt to handle such cases here by doing an additional reload
6218 of the full address after the usual processing by find_reloads_address.
6219 Note that this may not work in the general case, but it seems to cover
6220 the cases where this situation currently occurs. A more general fix
6221 might be to reload the *value* instead of the address, but this would
6222 not be expected by the callers of this routine as-is.
6224 If find_reloads_address has already completely replaced the address, there
6225 is nothing further to do. */
6226 if (reloaded == 0
6227 && reg_equiv_mem (regno) != 0
6228 && !strict_memory_address_addr_space_p
6229 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6230 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6232 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6233 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6234 MEM, SCRATCH),
6235 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6236 reloaded = 1;
6239 /* If this is not a toplevel operand, find_reloads doesn't see this
6240 substitution. We have to emit a USE of the pseudo so that
6241 delete_output_reload can see it. */
6242 if (replace_reloads && recog_data.operand[opnum] != x)
6243 /* We mark the USE with QImode so that we recognize it as one that
6244 can be safely deleted at the end of reload. */
6245 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6246 QImode);
6248 if (address_reloaded)
6249 *address_reloaded = reloaded;
6251 return tem;
6254 /* Substitute into the current INSN the registers into which we have reloaded
6255 the things that need reloading. The array `replacements'
6256 contains the locations of all pointers that must be changed
6257 and says what to replace them with.
6259 Return the rtx that X translates into; usually X, but modified. */
6261 void
6262 subst_reloads (rtx_insn *insn)
6264 int i;
6266 for (i = 0; i < n_replacements; i++)
6268 struct replacement *r = &replacements[i];
6269 rtx reloadreg = rld[r->what].reg_rtx;
6270 if (reloadreg)
6272 #ifdef DEBUG_RELOAD
6273 /* This checking takes a very long time on some platforms
6274 causing the gcc.c-torture/compile/limits-fnargs.c test
6275 to time out during testing. See PR 31850.
6277 Internal consistency test. Check that we don't modify
6278 anything in the equivalence arrays. Whenever something from
6279 those arrays needs to be reloaded, it must be unshared before
6280 being substituted into; the equivalence must not be modified.
6281 Otherwise, if the equivalence is used after that, it will
6282 have been modified, and the thing substituted (probably a
6283 register) is likely overwritten and not a usable equivalence. */
6284 int check_regno;
6286 for (check_regno = 0; check_regno < max_regno; check_regno++)
6288 #define CHECK_MODF(ARRAY) \
6289 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6290 || !loc_mentioned_in_p (r->where, \
6291 (*reg_equivs)[check_regno].ARRAY))
6293 CHECK_MODF (constant);
6294 CHECK_MODF (memory_loc);
6295 CHECK_MODF (address);
6296 CHECK_MODF (mem);
6297 #undef CHECK_MODF
6299 #endif /* DEBUG_RELOAD */
6301 /* If we're replacing a LABEL_REF with a register, there must
6302 already be an indication (to e.g. flow) which label this
6303 register refers to. */
6304 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6305 || !JUMP_P (insn)
6306 || find_reg_note (insn,
6307 REG_LABEL_OPERAND,
6308 XEXP (*r->where, 0))
6309 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6311 /* Encapsulate RELOADREG so its machine mode matches what
6312 used to be there. Note that gen_lowpart_common will
6313 do the wrong thing if RELOADREG is multi-word. RELOADREG
6314 will always be a REG here. */
6315 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6316 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6318 *r->where = reloadreg;
6320 /* If reload got no reg and isn't optional, something's wrong. */
6321 else
6322 gcc_assert (rld[r->what].optional);
6326 /* Make a copy of any replacements being done into X and move those
6327 copies to locations in Y, a copy of X. */
6329 void
6330 copy_replacements (rtx x, rtx y)
6332 copy_replacements_1 (&x, &y, n_replacements);
6335 static void
6336 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6338 int i, j;
6339 rtx x, y;
6340 struct replacement *r;
6341 enum rtx_code code;
6342 const char *fmt;
6344 for (j = 0; j < orig_replacements; j++)
6345 if (replacements[j].where == px)
6347 r = &replacements[n_replacements++];
6348 r->where = py;
6349 r->what = replacements[j].what;
6350 r->mode = replacements[j].mode;
6353 x = *px;
6354 y = *py;
6355 code = GET_CODE (x);
6356 fmt = GET_RTX_FORMAT (code);
6358 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6360 if (fmt[i] == 'e')
6361 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6362 else if (fmt[i] == 'E')
6363 for (j = XVECLEN (x, i); --j >= 0; )
6364 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6365 orig_replacements);
6369 /* Change any replacements being done to *X to be done to *Y. */
6371 void
6372 move_replacements (rtx *x, rtx *y)
6374 int i;
6376 for (i = 0; i < n_replacements; i++)
6377 if (replacements[i].where == x)
6378 replacements[i].where = y;
6381 /* If LOC was scheduled to be replaced by something, return the replacement.
6382 Otherwise, return *LOC. */
6384 rtx
6385 find_replacement (rtx *loc)
6387 struct replacement *r;
6389 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6391 rtx reloadreg = rld[r->what].reg_rtx;
6393 if (reloadreg && r->where == loc)
6395 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6396 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6398 return reloadreg;
6400 else if (reloadreg && GET_CODE (*loc) == SUBREG
6401 && r->where == &SUBREG_REG (*loc))
6403 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6404 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6406 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6407 GET_MODE (SUBREG_REG (*loc)),
6408 SUBREG_BYTE (*loc));
6412 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6413 what's inside and make a new rtl if so. */
6414 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6415 || GET_CODE (*loc) == MULT)
6417 rtx x = find_replacement (&XEXP (*loc, 0));
6418 rtx y = find_replacement (&XEXP (*loc, 1));
6420 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6421 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6424 return *loc;
6427 /* Return nonzero if register in range [REGNO, ENDREGNO)
6428 appears either explicitly or implicitly in X
6429 other than being stored into (except for earlyclobber operands).
6431 References contained within the substructure at LOC do not count.
6432 LOC may be zero, meaning don't ignore anything.
6434 This is similar to refers_to_regno_p in rtlanal.c except that we
6435 look at equivalences for pseudos that didn't get hard registers. */
6437 static int
6438 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6439 rtx x, rtx *loc)
6441 int i;
6442 unsigned int r;
6443 RTX_CODE code;
6444 const char *fmt;
6446 if (x == 0)
6447 return 0;
6449 repeat:
6450 code = GET_CODE (x);
6452 switch (code)
6454 case REG:
6455 r = REGNO (x);
6457 /* If this is a pseudo, a hard register must not have been allocated.
6458 X must therefore either be a constant or be in memory. */
6459 if (r >= FIRST_PSEUDO_REGISTER)
6461 if (reg_equiv_memory_loc (r))
6462 return refers_to_regno_for_reload_p (regno, endregno,
6463 reg_equiv_memory_loc (r),
6464 (rtx*) 0);
6466 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6467 return 0;
6470 return (endregno > r
6471 && regno < r + (r < FIRST_PSEUDO_REGISTER
6472 ? hard_regno_nregs[r][GET_MODE (x)]
6473 : 1));
6475 case SUBREG:
6476 /* If this is a SUBREG of a hard reg, we can see exactly which
6477 registers are being modified. Otherwise, handle normally. */
6478 if (REG_P (SUBREG_REG (x))
6479 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6481 unsigned int inner_regno = subreg_regno (x);
6482 unsigned int inner_endregno
6483 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6484 ? subreg_nregs (x) : 1);
6486 return endregno > inner_regno && regno < inner_endregno;
6488 break;
6490 case CLOBBER:
6491 case SET:
6492 if (&SET_DEST (x) != loc
6493 /* Note setting a SUBREG counts as referring to the REG it is in for
6494 a pseudo but not for hard registers since we can
6495 treat each word individually. */
6496 && ((GET_CODE (SET_DEST (x)) == SUBREG
6497 && loc != &SUBREG_REG (SET_DEST (x))
6498 && REG_P (SUBREG_REG (SET_DEST (x)))
6499 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6500 && refers_to_regno_for_reload_p (regno, endregno,
6501 SUBREG_REG (SET_DEST (x)),
6502 loc))
6503 /* If the output is an earlyclobber operand, this is
6504 a conflict. */
6505 || ((!REG_P (SET_DEST (x))
6506 || earlyclobber_operand_p (SET_DEST (x)))
6507 && refers_to_regno_for_reload_p (regno, endregno,
6508 SET_DEST (x), loc))))
6509 return 1;
6511 if (code == CLOBBER || loc == &SET_SRC (x))
6512 return 0;
6513 x = SET_SRC (x);
6514 goto repeat;
6516 default:
6517 break;
6520 /* X does not match, so try its subexpressions. */
6522 fmt = GET_RTX_FORMAT (code);
6523 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6525 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6527 if (i == 0)
6529 x = XEXP (x, 0);
6530 goto repeat;
6532 else
6533 if (refers_to_regno_for_reload_p (regno, endregno,
6534 XEXP (x, i), loc))
6535 return 1;
6537 else if (fmt[i] == 'E')
6539 int j;
6540 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6541 if (loc != &XVECEXP (x, i, j)
6542 && refers_to_regno_for_reload_p (regno, endregno,
6543 XVECEXP (x, i, j), loc))
6544 return 1;
6547 return 0;
6550 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6551 we check if any register number in X conflicts with the relevant register
6552 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6553 contains a MEM (we don't bother checking for memory addresses that can't
6554 conflict because we expect this to be a rare case).
6556 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6557 that we look at equivalences for pseudos that didn't get hard registers. */
6559 int
6560 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6562 int regno, endregno;
6564 /* Overly conservative. */
6565 if (GET_CODE (x) == STRICT_LOW_PART
6566 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6567 x = XEXP (x, 0);
6569 /* If either argument is a constant, then modifying X can not affect IN. */
6570 if (CONSTANT_P (x) || CONSTANT_P (in))
6571 return 0;
6572 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6573 return refers_to_mem_for_reload_p (in);
6574 else if (GET_CODE (x) == SUBREG)
6576 regno = REGNO (SUBREG_REG (x));
6577 if (regno < FIRST_PSEUDO_REGISTER)
6578 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6579 GET_MODE (SUBREG_REG (x)),
6580 SUBREG_BYTE (x),
6581 GET_MODE (x));
6582 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6583 ? subreg_nregs (x) : 1);
6585 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6587 else if (REG_P (x))
6589 regno = REGNO (x);
6591 /* If this is a pseudo, it must not have been assigned a hard register.
6592 Therefore, it must either be in memory or be a constant. */
6594 if (regno >= FIRST_PSEUDO_REGISTER)
6596 if (reg_equiv_memory_loc (regno))
6597 return refers_to_mem_for_reload_p (in);
6598 gcc_assert (reg_equiv_constant (regno));
6599 return 0;
6602 endregno = END_HARD_REGNO (x);
6604 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6606 else if (MEM_P (x))
6607 return refers_to_mem_for_reload_p (in);
6608 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6609 || GET_CODE (x) == CC0)
6610 return reg_mentioned_p (x, in);
6611 else
6613 gcc_assert (GET_CODE (x) == PLUS);
6615 /* We actually want to know if X is mentioned somewhere inside IN.
6616 We must not say that (plus (sp) (const_int 124)) is in
6617 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6618 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6619 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6620 while (MEM_P (in))
6621 in = XEXP (in, 0);
6622 if (REG_P (in))
6623 return 0;
6624 else if (GET_CODE (in) == PLUS)
6625 return (rtx_equal_p (x, in)
6626 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6627 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6628 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6629 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6632 gcc_unreachable ();
6635 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6636 registers. */
6638 static int
6639 refers_to_mem_for_reload_p (rtx x)
6641 const char *fmt;
6642 int i;
6644 if (MEM_P (x))
6645 return 1;
6647 if (REG_P (x))
6648 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6649 && reg_equiv_memory_loc (REGNO (x)));
6651 fmt = GET_RTX_FORMAT (GET_CODE (x));
6652 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6653 if (fmt[i] == 'e'
6654 && (MEM_P (XEXP (x, i))
6655 || refers_to_mem_for_reload_p (XEXP (x, i))))
6656 return 1;
6658 return 0;
/* Check the insns before INSN to see if there is a suitable register
   containing the same value as GOAL.
   If OTHER is -1, look for a register in class RCLASS.
   Otherwise, just see if register number OTHER shares GOAL's value.

   Return an rtx for the register found, or zero if none is found.

   If RELOAD_REG_P is (short *)1,
   we reject any hard reg that appears in reload_reg_rtx
   because such a hard reg is also needed coming into this insn.

   If RELOAD_REG_P is any other nonzero value,
   it is a vector indexed by hard reg number
   and we reject any hard reg whose element in the vector is nonnegative
   as well as any that appears in reload_reg_rtx.

   If GOAL is zero, then GOALREG is a register number; we look
   for an equivalent for that register.

   MODE is the machine mode of the value we want an equivalence for.
   If GOAL is nonzero and not VOIDmode, then it must have mode MODE.

   This function is used by jump.c as well as in the reload pass.

   If GOAL is the sum of the stack pointer and a constant, we treat it
   as if it were a constant except that sp is required to be unchanging.  */

rtx
find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
                short *reload_reg_p, int goalreg, machine_mode mode)
{
  rtx_insn *p = insn;
  rtx goaltry, valtry, value;
  rtx_insn *where;
  rtx pat;
  int regno = -1;
  int valueno;
  int goal_mem = 0;
  int goal_const = 0;
  int goal_mem_addr_varies = 0;
  int need_stable_sp = 0;
  int nregs;
  int valuenregs;
  int num = 0;

  if (goal == 0)
    regno = goalreg;
  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
    {
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
        return 0;
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
        return 0;
      /* An address with side effects must be reexecuted.  */
      switch (code)
        {
        case POST_INC:
        case PRE_INC:
        case POST_DEC:
        case PRE_DEC:
        case POST_MODIFY:
        case PRE_MODIFY:
          return 0;
        default:
          break;
        }
      goal_mem = 1;
    }
  else if (CONSTANT_P (goal))
    goal_const = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == stack_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == frame_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = 1;
  else
    return 0;

  num = 0;
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */
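  /* The backward walk below is also bounded by the max-reload-search-insns
     parameter (PARAM_MAX_RELOAD_SEARCH_INSNS).  */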

  while (1)
    {
      p = PREV_INSN (p);
      if (p && DEBUG_INSN_P (p))
        continue;
      num++;
      if (p == 0 || LABEL_P (p)
          || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
        return 0;

      /* Don't reuse register contents from before a setjmp-type
         function call; on the second return (from the longjmp) it
         might have been clobbered by a later reuse.  It doesn't
         seem worthwhile to actually go and see if it is actually
         reused even if that information would be readily available;
         just don't reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
        return 0;

      if (NONJUMP_INSN_P (p)
          /* If we don't want spill regs ...  */
          && (! (reload_reg_p != 0
                 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
              /* ... then ignore insns introduced by reload; they aren't
                 useful and can cause results in reload_as_needed to be
                 different from what they were when calculating the need for
                 spills.  If we notice an input-reload insn here, we will
                 reject it below, but it might hide a usable equivalent.
                 That makes bad code.  It may even fail: perhaps no reg was
                 spilled for this insn because it was assumed we would find
                 that equivalent.  */
              || INSN_UID (p) < reload_first_uid))
        {
          rtx tem;
          pat = single_set (p);

          /* First check for something that sets some reg equal to GOAL.  */
          if (pat != 0
              && ((regno >= 0
                   && true_regnum (SET_SRC (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  ||
                  (regno >= 0
                   && true_regnum (SET_DEST (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
                  ||
                  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
                   /* When looking for stack pointer + const,
                      make sure we don't use a stack adjust.  */
                   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
                  /* If we are looking for a constant,
                     and something equivalent to that constant was copied
                     into a reg, we can use that reg.  */
                  || (goal_const && REG_NOTES (p) != 0
                      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
                      && ((rtx_equal_p (XEXP (tem, 0), goal)
                           && (valueno
                               = true_regnum (valtry = SET_DEST (pat))) >= 0)
                          || (REG_P (SET_DEST (pat))
                              && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                              && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                              && CONST_INT_P (goal)
                              && 0 != (goaltry
                                       = operand_subword (XEXP (tem, 0), 0, 0,
                                                          VOIDmode))
                              && rtx_equal_p (goal, goaltry)
                              && (valtry
                                  = operand_subword (SET_DEST (pat), 0, 0,
                                                     VOIDmode))
                              && (valueno = true_regnum (valtry)) >= 0)))
                  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
                                                          NULL_RTX))
                      && REG_P (SET_DEST (pat))
                      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                      && CONST_INT_P (goal)
                      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
                                                          VOIDmode))
                      && rtx_equal_p (goal, goaltry)
                      && (valtry
                          = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
                      && (valueno = true_regnum (valtry)) >= 0)))
            {
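              /* P copies between GOAL and some register VALTRY, or sets a
                 register to a constant equivalent to GOAL.  Accept the
                 candidate only if it is the specific hard register OTHER,
                 or, when OTHER is -1, a hard register of class RCLASS that
                 can hold MODE.  */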
              if (other >= 0)
                {
                  if (valueno != other)
                    continue;
                }
              else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
                continue;
              else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
                                           mode, valueno))
                continue;
              value = valtry;
              where = p;
              break;
            }
        }
    }

  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
    return 0;

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
                                                          goal)))
    need_stable_sp = 1;

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)
    return 0;

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
                                       goal, (rtx*) 0))
    return 0;

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  else
    nregs = 1;
  valuenregs = hard_regno_nregs[valueno][mode];

  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)
    return 0;

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
    {
      int i;
      for (i = 0; i < valuenregs; ++i)
        if (reload_reg_p[valueno + i] >= 0)
          return 0;
    }

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
    {
      int i;
      for (i = 0; i < n_reloads; i++)
        if (rld[i].reg_rtx != 0 && rld[i].in)
          {
            int regno1 = REGNO (rld[i].reg_rtx);
            int nregs1 = hard_regno_nregs[regno1]
                                         [GET_MODE (rld[i].reg_rtx)];
            if (regno1 < valueno + valuenregs
                && regno1 + nregs1 > valueno)
              return 0;
          }
    }

  if (goal_mem)
    /* We must treat frame pointer as varying here,
       since it can vary--in a nonlocal goto as generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));

  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  */

  p = insn;
  while (1)
    {
      p = PREV_INSN (p);
      if (p == where)
        return value;

      /* Don't trust the conversion past a function call
         if either of the two is in a call-clobbered register, or memory.  */
      if (CALL_P (p))
        {
          int i;

          if (goal_mem || need_stable_sp)
            return 0;

          if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < nregs; ++i)
              if (call_used_regs[regno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
                return 0;

          if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < valuenregs; ++i)
              if (call_used_regs[valueno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
                return 0;
        }

      if (INSN_P (p))
        {
          pat = PATTERN (p);

          /* Watch out for unspec_volatile, and volatile asms.  */
          if (volatile_insn_p (pat))
            return 0;

          /* If this insn P stores in either GOAL or VALUE, return 0.
             If GOAL is a memory ref and this insn writes memory, return 0.
             If GOAL is a memory ref and its address is not constant,
             and this insn P changes a register used in GOAL, return 0.  */

          if (GET_CODE (pat) == COND_EXEC)
            pat = COND_EXEC_CODE (pat);
          if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
            {
              rtx dest = SET_DEST (pat);
              while (GET_CODE (dest) == SUBREG
                     || GET_CODE (dest) == ZERO_EXTRACT
                     || GET_CODE (dest) == STRICT_LOW_PART)
                dest = XEXP (dest, 0);
              if (REG_P (dest))
                {
                  int xregno = REGNO (dest);
                  int xnregs;
                  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                  else
                    xnregs = 1;
                  if (xregno < regno + nregs && xregno + xnregs > regno)
                    return 0;
                  if (xregno < valueno + valuenregs
                      && xregno + xnregs > valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (dest, goal))
                    return 0;
                  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                    return 0;
                }
              else if (goal_mem && MEM_P (dest)
                       && ! push_operand (dest, GET_MODE (dest)))
                return 0;
              else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                       && reg_equiv_memory_loc (regno) != 0)
                return 0;
              else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
                return 0;
            }
          else if (GET_CODE (pat) == PARALLEL)
            {
              int i;
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  rtx v1 = XVECEXP (pat, 0, i);
                  if (GET_CODE (v1) == COND_EXEC)
                    v1 = COND_EXEC_CODE (v1);
                  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
                    {
                      rtx dest = SET_DEST (v1);
                      while (GET_CODE (dest) == SUBREG
                             || GET_CODE (dest) == ZERO_EXTRACT
                             || GET_CODE (dest) == STRICT_LOW_PART)
                        dest = XEXP (dest, 0);
                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs;
                          if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                            xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                          else
                            xnregs = 1;
                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          if (xregno < valueno + valuenregs
                              && xregno + xnregs > valueno)
                            return 0;
                          if (goal_mem_addr_varies
                              && reg_overlap_mentioned_for_reload_p (dest,
                                                                     goal))
                            return 0;
                          if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                            return 0;
                        }
                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                               && reg_equiv_memory_loc (regno) != 0)
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

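          /* A call can also clobber hard registers that appear only as
             CLOBBERs in its CALL_INSN_FUNCTION_USAGE list; treat those
             exactly like clobbers in the insn pattern above.  */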
          if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
            {
              rtx link;

              for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
                   link = XEXP (link, 1))
                {
                  pat = XEXP (link, 0);
                  if (GET_CODE (pat) == CLOBBER)
                    {
                      rtx dest = SET_DEST (pat);

                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs
                            = hard_regno_nregs[xregno][GET_MODE (dest)];

                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          else if (xregno < valueno + valuenregs
                                   && xregno + xnregs > valueno)
                            return 0;
                          else if (goal_mem_addr_varies
                                   && reg_overlap_mentioned_for_reload_p (dest,
                                                                          goal))
                            return 0;
                        }

                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

#ifdef AUTO_INC_DEC
          /* If this insn auto-increments or auto-decrements
             either regno or valueno, return 0 now.
             If GOAL is a memory ref and its address is not constant,
             and this insn P increments a register used in GOAL, return 0.  */
          {
            rtx link;

            for (link = REG_NOTES (p); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_INC
                  && REG_P (XEXP (link, 0)))
                {
                  int incno = REGNO (XEXP (link, 0));
                  if (incno < regno + nregs && incno >= regno)
                    return 0;
                  if (incno < valueno + valuenregs && incno >= valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
                                                             goal))
                    return 0;
                }
          }
#endif
        }
    }
}

/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

static int
find_inc_amount (rtx x, rtx inced)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i;

  if (code == MEM)
    {
      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
           || GET_CODE (addr) == POST_DEC
           || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_INC)
          && XEXP (addr, 0) == inced)
        return GET_MODE_SIZE (GET_MODE (x));
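      /* For {PRE,POST}_MODIFY the increment is the constant addend of the
         inner PLUS; it may be negative, so return its absolute value.  */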
      else if ((GET_CODE (addr) == PRE_MODIFY
                || GET_CODE (addr) == POST_MODIFY)
               && GET_CODE (XEXP (addr, 1)) == PLUS
               && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
               && XEXP (addr, 0) == inced
               && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
        {
          i = INTVAL (XEXP (XEXP (addr, 1), 1));
          return i < 0 ? -i : i;
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          int tem = find_inc_amount (XEXP (x, i), inced);
          if (tem != 0)
            return tem;
        }
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              int tem = find_inc_amount (XVECEXP (x, i, j), inced);
              if (tem != 0)
                return tem;
            }
        }
    }

  return 0;
}

/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

#ifdef AUTO_INC_DEC
static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
                           rtx insn)
{
  rtx link;

  gcc_assert (insn);

  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        unsigned int test = (int) REGNO (XEXP (link, 0));
        if (test >= regno && test < endregno)
          return 1;
      }
  return 0;
}
#else

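/* Without AUTO_INC_DEC support there are never REG_INC notes, so the
   check is trivially false.  */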
#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

#endif

/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
                   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

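  /* A PARALLEL may contain several SETs and CLOBBERs; check each element
     in turn.  */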
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

      for (; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          if ((GET_CODE (elt) == CLOBBER
               || (sets == 1 && GET_CODE (elt) == SET))
              && REG_P (XEXP (elt, 0)))
            {
              unsigned int test = REGNO (XEXP (elt, 0));

              if (test >= regno && test < endregno)
                return 1;
            }
          if (sets == 2
              && reg_inc_found_and_valid_p (regno, endregno, elt))
            return 1;
        }
    }

  return 0;
}

/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
rtx
reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
{
  int regno;

  if (GET_MODE (reloadreg) == mode)
    return reloadreg;

  regno = REGNO (reloadreg);

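  /* With big-endian register words the low part of a multi-register value
     sits in the highest-numbered registers, so step REGNO forward by the
     difference in register counts.  */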
  if (REG_WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
             - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
}

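/* Human-readable names for the reload types, indexed by the when_needed
   field of struct reload; used only by the debug output below.  */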
static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};

/* These functions are used to print the variables set by 'find_reloads' */

DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
        {
          fprintf (f, "reload_in (%s) = ",
                   GET_MODE_NAME (rld[r].inmode));
          print_inline_rtx (f, rld[r].in, 24);
          fprintf (f, "\n\t");
        }

      if (rld[r].out != 0)
        {
          fprintf (f, "reload_out (%s) = ",
                   GET_MODE_NAME (rld[r].outmode));
          print_inline_rtx (f, rld[r].out, 24);
          fprintf (f, "\n\t");
        }

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
               reload_when_needed_name[(int) rld[r].when_needed],
               rld[r].opnum);

      if (rld[r].optional)
        fprintf (f, ", optional");

      if (rld[r].nongroup)
        fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
        fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
        fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
        fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
        {
          fprintf (f, "\n\treload_in_reg: ");
          print_inline_rtx (f, rld[r].in_reg, 24);
        }

      if (rld[r].out_reg != 0)
        {
          fprintf (f, "\n\treload_out_reg: ");
          print_inline_rtx (f, rld[r].out_reg, 24);
        }

      if (rld[r].reg_rtx != 0)
        {
          fprintf (f, "\n\treload_reg_rtx: ");
          print_inline_rtx (f, rld[r].reg_rtx, 24);
        }

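      /* PREFIX puts the first piece of secondary-reload information on a
         fresh line; once something has been printed it becomes ", " so the
         related fields share that line.  */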
      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
        {
          fprintf (f, "%ssecondary_in_reload = %d",
                   prefix, rld[r].secondary_in_reload);
          prefix = ", ";
        }

      if (rld[r].secondary_out_reload != -1)
        fprintf (f, "%ssecondary_out_reload = %d\n",
                 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
        {
          fprintf (f, "%ssecondary_in_icode = %s", prefix,
                   insn_data[rld[r].secondary_in_icode].name);
          prefix = ", ";
        }

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
        fprintf (f, "%ssecondary_out_icode = %s", prefix,
                 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}

DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);
}