contrib/gcc-4.7/gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
58 NOTE SIDE EFFECTS:
60 find_reloads can alter the operands of the instruction it is called on.
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
74 Using a reload register for several reloads in one insn:
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
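/* An illustrative sketch of the calling sequence described above, as a
   caller in reload1.c might drive it.  The wrapper function and the way
   a hard register is chosen here are hypothetical; find_reloads,
   subst_reloads, n_reloads and rld[] are the real interfaces.  */
#if 0
static void
reload_one_insn_sketch (rtx insn, short *spilled_regs, int ind_levels)
{
  int i;

  /* 1.  Record the reloads needed by INSN; the nonzero second argument
     also records where the reloaded values appear so that they can be
     substituted later.  */
  find_reloads (insn, 1, ind_levels, 1, spilled_regs);

  /* 2.  Choose a hard reg for each reload that find_reloads did not
     already resolve (it may have filled in rld[i].reg_rtx itself).  */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      rld[i].reg_rtx = gen_rtx_REG (rld[i].inmode, 0 /* chosen hard regno */);

  /* 3.  Substitute the chosen reload regs into the recorded locations;
     the caller also emits load insns before INSN and any store insns
     after it.  */
  subst_reloads (insn);
}
#endif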
88 #define REG_OK_STRICT
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
115 /* True if X is a constant that can be forced into the constant pool.
116 MODE is the mode of the operand, or VOIDmode if not known. */
117 #define CONST_POOL_OK_P(MODE, X) \
118 ((MODE) != VOIDmode \
119 && CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (MODE, X))
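/* An illustrative sketch of how this predicate is typically used when a
   constant operand has to be spilled to memory; OP and MODE stand for
   the operand and its mode as computed by the caller, and the exact
   surrounding context is hypothetical.  */
#if 0
if (CONST_POOL_OK_P (mode, op))
  /* force_const_mem places the constant in the literal pool and returns
     a MEM referring to it; that MEM can then be reloaded like any other
     memory operand.  */
  op = validize_mem (force_const_mem (mode, op));
#endif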
123 /* True if C is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
126 static inline bool
127 small_register_class_p (reg_class_t rclass)
129 return (reg_class_size [(int) rclass] == 1
130 || (reg_class_size [(int) rclass] >= 1
131 && targetm.class_likely_spilled_p (rclass)));
135 /* All reloads of the current insn are recorded here. See reload.h for
136 comments. */
137 int n_reloads;
138 struct reload rld[MAX_RELOADS];
140 /* All the "earlyclobber" operands of the current insn
141 are recorded here. */
142 int n_earlyclobbers;
143 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
145 int reload_n_operands;
147 /* Replacing reloads.
149 If `replace_reloads' is nonzero, then as each reload is recorded
150 an entry is made for it in the table `replacements'.
151 Then later `subst_reloads' can look through that table and
152 perform all the replacements needed. */
154 /* Nonzero means record the places to replace. */
155 static int replace_reloads;
157 /* Each replacement is recorded with a structure like this. */
158 struct replacement
160 rtx *where; /* Location to store in */
161 int what; /* which reload this is for */
162 enum machine_mode mode; /* mode it must have */
165 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
167 /* Number of replacements currently recorded. */
168 static int n_replacements;
170 /* Used to track what is modified by an operand. */
171 struct decomposition
173 int reg_flag; /* Nonzero if referencing a register. */
174 int safe; /* Nonzero if this can't conflict with anything. */
175 rtx base; /* Base address for MEM. */
176 HOST_WIDE_INT start; /* Starting offset or register number. */
177 HOST_WIDE_INT end; /* Ending offset or register number. */
180 #ifdef SECONDARY_MEMORY_NEEDED
182 /* Save MEMs needed to copy from one class of registers to another. One MEM
183 is used per mode, but normally only one or two modes are ever used.
185 We keep two versions, before and after register elimination. The one
186 after register elimination is recorded separately for each operand. This
187 is done in case the address is not valid to be sure that we separately
188 reload each. */
190 static rtx secondary_memlocs[NUM_MACHINE_MODES];
191 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
192 static int secondary_memlocs_elim_used = 0;
193 #endif
195 /* The instruction we are doing reloads for;
196 so we can test whether a register dies in it. */
197 static rtx this_insn;
199 /* Nonzero if this instruction is a user-specified asm with operands. */
200 static int this_insn_is_asm;
202 /* If hard_regs_live_known is nonzero,
203 we can tell which hard regs are currently live,
204 at least enough to succeed in choosing dummy reloads. */
205 static int hard_regs_live_known;
207 /* Indexed by hard reg number,
208 element is nonnegative if hard reg has been spilled.
209 This vector is passed to `find_reloads' as an argument
210 and is not changed here. */
211 static short *static_reload_reg_p;
213 /* Set to 1 in subst_reg_equivs if it changes anything. */
214 static int subst_reg_equivs_changed;
216 /* On return from push_reload, holds the reload-number for the OUT
217 operand, which can be different from that for the input operand. */
218 static int output_reloadnum;
220 /* Compare two RTX's. */
221 #define MATCHES(x, y) \
222 (x == y || (x != 0 && (REG_P (x) \
223 ? REG_P (y) && REGNO (x) == REGNO (y) \
224 : rtx_equal_p (x, y) && ! side_effects_p (x))))
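/* An illustrative sketch of what MATCHES accepts: the same object, the
   same register number, or structurally equal rtx'es without side
   effects.  The function below is hypothetical and exists only to show
   the macro in use.  */
#if 0
static void
matches_example (void)
{
  rtx a = gen_rtx_REG (SImode, 3);
  rtx b = gen_rtx_REG (SImode, 3);

  gcc_assert (MATCHES (a, b));              /* same register number */
  gcc_assert (! MATCHES (a, const0_rtx));   /* different values */
}
#endif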
226 /* Indicates if two reload purposes are for similar enough things that we
227 can merge their reloads. */
228 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
229 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
230 || ((when1) == (when2) && (op1) == (op2)) \
231 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
232 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
233 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
234 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
235 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
237 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
238 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
239 ((when1) != (when2) \
240 || ! ((op1) == (op2) \
241 || (when1) == RELOAD_FOR_INPUT \
242 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
243 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
245 /* If we are going to reload an address, compute the reload type to
246 use. */
247 #define ADDR_TYPE(type) \
248 ((type) == RELOAD_FOR_INPUT_ADDRESS \
249 ? RELOAD_FOR_INPADDR_ADDRESS \
250 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
251 ? RELOAD_FOR_OUTADDR_ADDRESS \
252 : (type)))
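/* An illustrative sketch of how the two merge macros interact; the
   function is hypothetical.  Two input reloads for different operands
   may share a reload register and keep the type RELOAD_FOR_INPUT,
   whereas address reloads for different parts of the insn may not be
   merged at all.  */
#if 0
static void
merge_macros_example (void)
{
  gcc_assert (MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1));
  gcc_assert (! MERGE_TO_OTHER (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1));

  gcc_assert (! MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS,
				  RELOAD_FOR_OUTPUT_ADDRESS, 0, 0));
}
#endif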
254 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
255 enum machine_mode, enum reload_type,
256 enum insn_code *, secondary_reload_info *);
257 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
258 int, unsigned int);
259 static void push_replacement (rtx *, int, enum machine_mode);
260 static void dup_replacements (rtx *, rtx *);
261 static void combine_reloads (void);
262 static int find_reusable_reload (rtx *, rtx, enum reg_class,
263 enum reload_type, int, int);
264 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
265 enum machine_mode, reg_class_t, int, int);
266 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
267 static struct decomposition decompose (rtx);
268 static int immune_p (rtx, rtx, struct decomposition);
269 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
270 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
271 int *);
272 static rtx make_memloc (rtx, int);
273 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
274 addr_space_t, rtx *);
275 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
276 int, enum reload_type, int, rtx);
277 static rtx subst_reg_equivs (rtx, rtx);
278 static rtx subst_indexed_address (rtx);
279 static void update_auto_inc_notes (rtx, int, int);
280 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
281 enum rtx_code, enum rtx_code, rtx *,
282 int, enum reload_type,int, rtx);
283 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
284 enum machine_mode, int,
285 enum reload_type, int);
286 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
287 int, rtx, int *);
288 static void copy_replacements_1 (rtx *, rtx *, int);
289 static int find_inc_amount (rtx, rtx);
290 static int refers_to_mem_for_reload_p (rtx);
291 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
292 rtx, rtx *);
294 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
295 list yet. */
297 static void
298 push_reg_equiv_alt_mem (int regno, rtx mem)
300 rtx it;
302 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
303 if (rtx_equal_p (XEXP (it, 0), mem))
304 return;
306 reg_equiv_alt_mem_list (regno)
307 = alloc_EXPR_LIST (REG_EQUIV, mem,
308 reg_equiv_alt_mem_list (regno));
311 /* Determine if any secondary reloads are needed for loading (if IN_P is
312 nonzero) or storing (if IN_P is zero) X to or from a reload register of
313 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
314 are needed, push them.
316 Return the reload number of the secondary reload we made, or -1 if
317 we didn't need one. *PICODE is set to the insn_code to use if we do
318 need a secondary reload. */
320 static int
321 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
322 enum reg_class reload_class,
323 enum machine_mode reload_mode, enum reload_type type,
324 enum insn_code *picode, secondary_reload_info *prev_sri)
326 enum reg_class rclass = NO_REGS;
327 enum reg_class scratch_class;
328 enum machine_mode mode = reload_mode;
329 enum insn_code icode = CODE_FOR_nothing;
330 enum insn_code t_icode = CODE_FOR_nothing;
331 enum reload_type secondary_type;
332 int s_reload, t_reload = -1;
333 const char *scratch_constraint;
334 char letter;
335 secondary_reload_info sri;
337 if (type == RELOAD_FOR_INPUT_ADDRESS
338 || type == RELOAD_FOR_OUTPUT_ADDRESS
339 || type == RELOAD_FOR_INPADDR_ADDRESS
340 || type == RELOAD_FOR_OUTADDR_ADDRESS)
341 secondary_type = type;
342 else
343 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
345 *picode = CODE_FOR_nothing;
347 /* If X is a paradoxical SUBREG, use the inner value to determine both the
348 mode and object being reloaded. */
349 if (paradoxical_subreg_p (x))
351 x = SUBREG_REG (x);
352 reload_mode = GET_MODE (x);
355 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
356 is still a pseudo-register by now, it *must* have an equivalent MEM
357 but we don't want to assume that), use that equivalent when seeing if
358 a secondary reload is needed since whether or not a reload is needed
359 might be sensitive to the form of the MEM. */
361 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
362 && reg_equiv_mem (REGNO (x)))
363 x = reg_equiv_mem (REGNO (x));
365 sri.icode = CODE_FOR_nothing;
366 sri.prev_sri = prev_sri;
367 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
368 reload_mode, &sri);
369 icode = (enum insn_code) sri.icode;
371 /* If we don't need any secondary registers, done. */
372 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
373 return -1;
375 if (rclass != NO_REGS)
376 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
377 reload_mode, type, &t_icode, &sri);
379 /* If we will be using an insn, the secondary reload is for a
380 scratch register. */
382 if (icode != CODE_FOR_nothing)
384 /* If IN_P is nonzero, the reload register will be the output in
385 operand 0. If IN_P is zero, the reload register will be the input
386 in operand 1. Outputs should have an initial "=", which we must
387 skip. */
389 /* ??? It would be useful to be able to handle only two, or more than
390 three, operands, but for now we can only handle the case of having
391 exactly three: output, input and one temp/scratch. */
392 gcc_assert (insn_data[(int) icode].n_operands == 3);
394 /* ??? We currently have no way to represent a reload that needs
395 an icode to reload from an intermediate tertiary reload register.
396 We should probably have a new field in struct reload to tag a
397 chain of scratch operand reloads onto. */
398 gcc_assert (rclass == NO_REGS);
400 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
401 gcc_assert (*scratch_constraint == '=');
402 scratch_constraint++;
403 if (*scratch_constraint == '&')
404 scratch_constraint++;
405 letter = *scratch_constraint;
406 scratch_class = (letter == 'r' ? GENERAL_REGS
407 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
408 scratch_constraint));
410 rclass = scratch_class;
411 mode = insn_data[(int) icode].operand[2].mode;
414 /* This case isn't valid, so fail. Reload is allowed to use the same
415 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
416 in the case of a secondary register, we actually need two different
417 registers for correct code. We fail here to prevent the possibility of
418 silently generating incorrect code later.
420 The convention is that secondary input reloads are valid only if the
421 secondary_class is different from class. If you have such a case, you
422 can not use secondary reloads, you must work around the problem some
423 other way.
425 Allow this when a reload_in/out pattern is being used. I.e. assume
426 that the generated code handles this case. */
428 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
429 || t_icode != CODE_FOR_nothing);
431 /* See if we can reuse an existing secondary reload. */
432 for (s_reload = 0; s_reload < n_reloads; s_reload++)
433 if (rld[s_reload].secondary_p
434 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
435 || reg_class_subset_p (rld[s_reload].rclass, rclass))
436 && ((in_p && rld[s_reload].inmode == mode)
437 || (! in_p && rld[s_reload].outmode == mode))
438 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
439 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
440 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
441 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
442 && (small_register_class_p (rclass)
443 || targetm.small_register_classes_for_mode_p (VOIDmode))
444 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
445 opnum, rld[s_reload].opnum))
447 if (in_p)
448 rld[s_reload].inmode = mode;
449 if (! in_p)
450 rld[s_reload].outmode = mode;
452 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
453 rld[s_reload].rclass = rclass;
455 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
456 rld[s_reload].optional &= optional;
457 rld[s_reload].secondary_p = 1;
458 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
459 opnum, rld[s_reload].opnum))
460 rld[s_reload].when_needed = RELOAD_OTHER;
462 break;
465 if (s_reload == n_reloads)
467 #ifdef SECONDARY_MEMORY_NEEDED
468 /* If we need a memory location to copy between the two reload regs,
469 set it up now. Note that we do the input case before making
470 the reload and the output case after. This is due to the
471 way reloads are output. */
473 if (in_p && icode == CODE_FOR_nothing
474 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
476 get_secondary_mem (x, reload_mode, opnum, type);
478 /* We may have just added new reloads. Make sure we add
479 the new reload at the end. */
480 s_reload = n_reloads;
482 #endif
484 /* We need to make a new secondary reload for this register class. */
485 rld[s_reload].in = rld[s_reload].out = 0;
486 rld[s_reload].rclass = rclass;
488 rld[s_reload].inmode = in_p ? mode : VOIDmode;
489 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
490 rld[s_reload].reg_rtx = 0;
491 rld[s_reload].optional = optional;
492 rld[s_reload].inc = 0;
493 /* Maybe we could combine these, but it seems too tricky. */
494 rld[s_reload].nocombine = 1;
495 rld[s_reload].in_reg = 0;
496 rld[s_reload].out_reg = 0;
497 rld[s_reload].opnum = opnum;
498 rld[s_reload].when_needed = secondary_type;
499 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
500 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
501 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
502 rld[s_reload].secondary_out_icode
503 = ! in_p ? t_icode : CODE_FOR_nothing;
504 rld[s_reload].secondary_p = 1;
506 n_reloads++;
508 #ifdef SECONDARY_MEMORY_NEEDED
509 if (! in_p && icode == CODE_FOR_nothing
510 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
511 get_secondary_mem (x, mode, opnum, type);
512 #endif
515 *picode = icode;
516 return s_reload;
519 /* If a secondary reload is needed, return its class. If both an intermediate
520 register and a scratch register are needed, we return the class of the
521 intermediate register. */
522 reg_class_t
523 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
524 rtx x)
526 enum insn_code icode;
527 secondary_reload_info sri;
529 sri.icode = CODE_FOR_nothing;
530 sri.prev_sri = NULL;
531 rclass
532 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
533 icode = (enum insn_code) sri.icode;
535 /* If there are no secondary reloads at all, we return NO_REGS.
536 If an intermediate register is needed, we return its class. */
537 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
538 return rclass;
540 /* No intermediate register is needed, but we have a special reload
541 pattern, which we assume for now needs a scratch register. */
542 return scratch_reload_class (icode);
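/* An illustrative sketch of how a caller elsewhere in the compiler can
   use this query when estimating the cost of a move; RCLASS, MODE, X
   and COST are placeholders for values the caller already has.  */
#if 0
if (secondary_reload_class (true, rclass, mode, x) != NO_REGS)
  /* Loading X into RCLASS needs an intermediate register (or a scratch
     for a reload pattern), so the move is more expensive.  */
  cost += 2;
#endif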
545 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
546 three operands, verify that operand 2 is an output operand, and return
547 its register class.
548 ??? We'd like to be able to handle any pattern with at least 2 operands,
549 for zero or more scratch registers, but that needs more infrastructure. */
550 enum reg_class
551 scratch_reload_class (enum insn_code icode)
553 const char *scratch_constraint;
554 char scratch_letter;
555 enum reg_class rclass;
557 gcc_assert (insn_data[(int) icode].n_operands == 3);
558 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
559 gcc_assert (*scratch_constraint == '=');
560 scratch_constraint++;
561 if (*scratch_constraint == '&')
562 scratch_constraint++;
563 scratch_letter = *scratch_constraint;
564 if (scratch_letter == 'r')
565 return GENERAL_REGS;
566 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
567 scratch_constraint);
568 gcc_assert (rclass != NO_REGS);
569 return rclass;
572 #ifdef SECONDARY_MEMORY_NEEDED
574 /* Return a memory location that will be used to copy X in mode MODE.
575 If we haven't already made a location for this mode in this insn,
576 call find_reloads_address on the location being returned. */
579 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
580 int opnum, enum reload_type type)
582 rtx loc;
583 int mem_valid;
585 /* By default, if MODE is narrower than a word, widen it to a word.
586 This is required because most machines that require these memory
587 locations do not support short load and stores from all registers
588 (e.g., FP registers). */
590 #ifdef SECONDARY_MEMORY_NEEDED_MODE
591 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
592 #else
593 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
594 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
595 #endif
597 /* If we already have made a MEM for this operand in MODE, return it. */
598 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
599 return secondary_memlocs_elim[(int) mode][opnum];
601 /* If this is the first time we've tried to get a MEM for this mode,
602 allocate a new one. `something_changed' in reload will get set
603 by noticing that the frame size has changed. */
605 if (secondary_memlocs[(int) mode] == 0)
607 #ifdef SECONDARY_MEMORY_NEEDED_RTX
608 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
609 #else
610 secondary_memlocs[(int) mode]
611 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
612 #endif
615 /* Get a version of the address doing any eliminations needed. If that
616 didn't give us a new MEM, make a new one if it isn't valid. */
618 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
619 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
620 MEM_ADDR_SPACE (loc));
622 if (! mem_valid && loc == secondary_memlocs[(int) mode])
623 loc = copy_rtx (loc);
625 /* The only time the call below will do anything is if the stack
626 offset is too large. In that case IND_LEVELS doesn't matter, so we
627 can just pass a zero. Adjust the type to be the address of the
628 corresponding object. If the address was valid, save the eliminated
629 address. If it wasn't valid, we need to make a reload each time, so
630 don't save it. */
632 if (! mem_valid)
634 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
635 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
636 : RELOAD_OTHER);
638 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
639 opnum, type, 0, 0);
642 secondary_memlocs_elim[(int) mode][opnum] = loc;
643 if (secondary_memlocs_elim_used <= (int)mode)
644 secondary_memlocs_elim_used = (int)mode + 1;
645 return loc;
648 /* Clear any secondary memory locations we've made. */
650 void
651 clear_secondary_mem (void)
653 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
655 #endif /* SECONDARY_MEMORY_NEEDED */
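/* An illustrative sketch of the target-side definition this code
   responds to: a machine whose integer and floating-point registers
   cannot be copied to each other directly might define something along
   these lines in its target headers (FLOAT_REGS is a hypothetical class
   name).  */
#if 0
#define SECONDARY_MEMORY_NEEDED(CLASS1, CLASS2, MODE) \
  (((CLASS1) == FLOAT_REGS) != ((CLASS2) == FLOAT_REGS))
#endif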
658 /* Find the largest class which has at least one register valid in
659 mode INNER, and which for every such register, that register number
660 plus N is also valid in OUTER (if in range) and is cheap to move
661 into REGNO. Such a class must exist. */
663 static enum reg_class
664 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
665 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
666 unsigned int dest_regno ATTRIBUTE_UNUSED)
668 int best_cost = -1;
669 int rclass;
670 int regno;
671 enum reg_class best_class = NO_REGS;
672 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
673 unsigned int best_size = 0;
674 int cost;
676 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
678 int bad = 0;
679 int good = 0;
680 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
681 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
683 if (HARD_REGNO_MODE_OK (regno, inner))
685 good = 1;
686 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
687 || ! HARD_REGNO_MODE_OK (regno + n, outer))
688 bad = 1;
692 if (bad || !good)
693 continue;
694 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
696 if ((reg_class_size[rclass] > best_size
697 && (best_cost < 0 || best_cost >= cost))
698 || best_cost > cost)
700 best_class = (enum reg_class) rclass;
701 best_size = reg_class_size[rclass];
702 best_cost = register_move_cost (outer, (enum reg_class) rclass,
703 dest_class);
707 gcc_assert (best_size != 0);
709 return best_class;
712 /* Return the number of a previously made reload that can be combined with
713 a new one, or n_reloads if none of the existing reloads can be used.
714 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
715 push_reload, they determine the kind of the new reload that we try to
716 combine. P_IN points to the corresponding value of IN, which can be
717 modified by this function.
718 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
720 static int
721 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
722 enum reload_type type, int opnum, int dont_share)
724 rtx in = *p_in;
725 int i;
726 /* We can't merge two reloads if the output of either one is
727 earlyclobbered. */
729 if (earlyclobber_operand_p (out))
730 return n_reloads;
732 /* We can use an existing reload if the class is right
733 and at least one of IN and OUT is a match
734 and the other is at worst neutral.
735 (A zero compared against anything is neutral.)
737 For targets with small register classes, don't use existing reloads
738 unless they are for the same thing since that can cause us to need
739 more reload registers than we otherwise would. */
741 for (i = 0; i < n_reloads; i++)
742 if ((reg_class_subset_p (rclass, rld[i].rclass)
743 || reg_class_subset_p (rld[i].rclass, rclass))
744 /* If the existing reload has a register, it must fit our class. */
745 && (rld[i].reg_rtx == 0
746 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
747 true_regnum (rld[i].reg_rtx)))
748 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
749 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
750 || (out != 0 && MATCHES (rld[i].out, out)
751 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
752 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
753 && (small_register_class_p (rclass)
754 || targetm.small_register_classes_for_mode_p (VOIDmode))
755 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
756 return i;
758 /* Reloading a plain reg for input can match a reload to postincrement
759 that reg, since the postincrement's value is the right value.
760 Likewise, it can match a preincrement reload, since we regard
761 the preincrementation as happening before any ref in this insn
762 to that register. */
763 for (i = 0; i < n_reloads; i++)
764 if ((reg_class_subset_p (rclass, rld[i].rclass)
765 || reg_class_subset_p (rld[i].rclass, rclass))
766 /* If the existing reload has a register, it must fit our
767 class. */
768 && (rld[i].reg_rtx == 0
769 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
770 true_regnum (rld[i].reg_rtx)))
771 && out == 0 && rld[i].out == 0 && rld[i].in != 0
772 && ((REG_P (in)
773 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
774 && MATCHES (XEXP (rld[i].in, 0), in))
775 || (REG_P (rld[i].in)
776 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
777 && MATCHES (XEXP (in, 0), rld[i].in)))
778 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
779 && (small_register_class_p (rclass)
780 || targetm.small_register_classes_for_mode_p (VOIDmode))
781 && MERGABLE_RELOADS (type, rld[i].when_needed,
782 opnum, rld[i].opnum))
784 /* Make sure reload_in ultimately has the increment,
785 not the plain register. */
786 if (REG_P (in))
787 *p_in = rld[i].in;
788 return i;
790 return n_reloads;
793 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
794 expression. MODE is the mode that X will be used in. OUTPUT is true if
795 the function is invoked for the output part of an enclosing reload. */
797 static bool
798 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
800 rtx inner;
802 /* Only SUBREGs are problematical. */
803 if (GET_CODE (x) != SUBREG)
804 return false;
806 inner = SUBREG_REG (x);
808 /* If INNER is a constant or PLUS, then INNER will need reloading. */
809 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
810 return true;
812 /* If INNER is not a hard register, then INNER will not need reloading. */
813 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
814 return false;
816 /* If INNER is not ok for MODE, then INNER will need reloading. */
817 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
818 return true;
820 /* If this is for an output, and the outer part is a word or smaller,
821 INNER is larger than a word and the number of registers in INNER is
822 not the same as the number of words in INNER, then INNER will need
823 reloading (with an in-out reload). */
824 return (output
825 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
826 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
827 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
828 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
831 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
832 requiring an extra reload register. The caller has already found that
833 IN contains some reference to REGNO, so check that we can produce the
834 new value in a single step. E.g. if we have
835 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
836 instruction that adds one to a register, this should succeed.
837 However, if we have something like
838 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
839 needs to be loaded into a register first, we need a separate reload
840 register.
841 Such PLUS reloads are generated by find_reload_address_part.
842 The out-of-range PLUS expressions are usually introduced in the instruction
843 patterns by register elimination and substituting pseudos without a home
844 by their function-invariant equivalences. */
845 static int
846 can_reload_into (rtx in, int regno, enum machine_mode mode)
848 rtx dst, test_insn;
849 int r = 0;
850 struct recog_data save_recog_data;
852 /* For matching constraints, we often get notional input reloads where
853 we want to use the original register as the reload register. I.e.
854 technically this is a non-optional input-output reload, but IN is
855 already a valid register, and has been chosen as the reload register.
856 Speed this up, since it trivially works. */
857 if (REG_P (in))
858 return 1;
860 /* To test MEMs properly, we'd have to take into account all the reloads
861 that are already scheduled, which can become quite complicated.
862 And since we've already handled address reloads for this MEM, it
863 should always succeed anyway. */
864 if (MEM_P (in))
865 return 1;
867 /* If we can make a simple SET insn that does the job, everything should
868 be fine. */
869 dst = gen_rtx_REG (mode, regno);
870 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
871 save_recog_data = recog_data;
872 if (recog_memoized (test_insn) >= 0)
874 extract_insn (test_insn);
875 r = constrain_operands (1);
877 recog_data = save_recog_data;
878 return r;
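/* An illustrative sketch of the decision this predicate supports when
   reloading an address; IN and REGNO stand for values the caller has
   already computed, and reuse_ok is a hypothetical local.  */
#if 0
/* (set (reg r) (plus (reg r) (const_int 1)))    usually reloadable in place
   (set (reg r) (plus (reg r) (const_int 999)))  may need a separate reg  */
int reuse_ok = can_reload_into (in, regno, GET_MODE (in));
#endif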
881 /* Record one reload that needs to be performed.
882 IN is an rtx saying where the data are to be found before this instruction.
883 OUT says where they must be stored after the instruction.
884 (IN is zero for data not read, and OUT is zero for data not written.)
885 INLOC and OUTLOC point to the places in the instructions where
886 IN and OUT were found.
887 If IN and OUT are both nonzero, it means the same register must be used
888 to reload both IN and OUT.
890 RCLASS is a register class required for the reloaded data.
891 INMODE is the machine mode that the instruction requires
892 for the reg that replaces IN and OUTMODE is likewise for OUT.
894 If IN is zero, then OUT's location and mode should be passed as
895 INLOC and INMODE.
897 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
899 OPTIONAL nonzero means this reload does not need to be performed:
900 it can be discarded if that is more convenient.
902 OPNUM and TYPE say what the purpose of this reload is.
904 The return value is the reload-number for this reload.
906 If both IN and OUT are nonzero, in some rare cases we might
907 want to make two separate reloads. (Actually we never do this now.)
908 Therefore, the reload-number for OUT is stored in
909 output_reloadnum when we return; the return value applies to IN.
910 Usually (presently always), when IN and OUT are nonzero,
911 the two reload-numbers are equal, but the caller should be careful to
912 distinguish them. */
915 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
916 enum reg_class rclass, enum machine_mode inmode,
917 enum machine_mode outmode, int strict_low, int optional,
918 int opnum, enum reload_type type)
920 int i;
921 int dont_share = 0;
922 int dont_remove_subreg = 0;
923 #ifdef LIMIT_RELOAD_CLASS
924 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
925 #endif
926 int secondary_in_reload = -1, secondary_out_reload = -1;
927 enum insn_code secondary_in_icode = CODE_FOR_nothing;
928 enum insn_code secondary_out_icode = CODE_FOR_nothing;
930 /* INMODE and/or OUTMODE could be VOIDmode if no mode
931 has been specified for the operand. In that case,
932 use the operand's mode as the mode to reload. */
933 if (inmode == VOIDmode && in != 0)
934 inmode = GET_MODE (in);
935 if (outmode == VOIDmode && out != 0)
936 outmode = GET_MODE (out);
938 /* If find_reloads and friends have so far failed to replace a pseudo
939 with its reg_equiv_constant, something went wrong
940 beforehand.
941 Note that it can't simply be done here if we missed it earlier
942 since the constant might need to be pushed into the literal pool
943 and the resulting memref would probably need further
944 reloading. */
945 if (in != 0 && REG_P (in))
947 int regno = REGNO (in);
949 gcc_assert (regno < FIRST_PSEUDO_REGISTER
950 || reg_renumber[regno] >= 0
951 || reg_equiv_constant (regno) == NULL_RTX);
954 /* reg_equiv_constant only contains constants which are obviously
955 not appropriate as destination. So if we would need to replace
956 the destination pseudo with a constant we are in real
957 trouble. */
958 if (out != 0 && REG_P (out))
960 int regno = REGNO (out);
962 gcc_assert (regno < FIRST_PSEUDO_REGISTER
963 || reg_renumber[regno] >= 0
964 || reg_equiv_constant (regno) == NULL_RTX);
967 /* If we have a read-write operand with an address side-effect,
968 change either IN or OUT so the side-effect happens only once. */
969 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
970 switch (GET_CODE (XEXP (in, 0)))
972 case POST_INC: case POST_DEC: case POST_MODIFY:
973 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
974 break;
976 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
977 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
978 break;
980 default:
981 break;
984 /* If we are reloading a (SUBREG constant ...), really reload just the
985 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
986 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
987 a pseudo and hence will become a MEM) with M1 wider than M2 and the
988 register is a pseudo, also reload the inside expression.
989 For machines that extend byte loads, do this for any SUBREG of a pseudo
990 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
991 M2 is an integral mode that gets extended when loaded.
992 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
993 where either M1 is not valid for R or M2 is wider than a word but we
994 only need one register to store an M2-sized quantity in R.
995 (However, if OUT is nonzero, we need to reload the reg *and*
996 the subreg, so do nothing here, and let following statement handle it.)
998 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
999 we can't handle it here because CONST_INT does not indicate a mode.
1001 Similarly, we must reload the inside expression if we have a
1002 STRICT_LOW_PART (presumably, in == out in this case).
1004 Also reload the inner expression if it does not require a secondary
1005 reload but the SUBREG does.
1007 Finally, reload the inner expression if it is a register that is in
1008 the class whose registers cannot be referenced in a different size
1009 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1010 cannot reload just the inside since we might end up with the wrong
1011 register class. But if it is inside a STRICT_LOW_PART, we have
1012 no choice, so we hope we do get the right register class there. */
1014 if (in != 0 && GET_CODE (in) == SUBREG
1015 && (subreg_lowpart_p (in) || strict_low)
1016 #ifdef CANNOT_CHANGE_MODE_CLASS
1017 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1018 #endif
1019 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1020 && (CONSTANT_P (SUBREG_REG (in))
1021 || GET_CODE (SUBREG_REG (in)) == PLUS
1022 || strict_low
1023 || (((REG_P (SUBREG_REG (in))
1024 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1025 || MEM_P (SUBREG_REG (in)))
1026 && ((GET_MODE_PRECISION (inmode)
1027 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1028 #ifdef LOAD_EXTEND_OP
1029 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1030 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1031 <= UNITS_PER_WORD)
1032 && (GET_MODE_PRECISION (inmode)
1033 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1034 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1035 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1036 #endif
1037 #ifdef WORD_REGISTER_OPERATIONS
1038 || ((GET_MODE_PRECISION (inmode)
1039 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1040 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1041 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1042 / UNITS_PER_WORD)))
1043 #endif
1045 || (REG_P (SUBREG_REG (in))
1046 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1047 /* The case where out is nonzero
1048 is handled differently in the following statement. */
1049 && (out == 0 || subreg_lowpart_p (in))
1050 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1051 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1052 > UNITS_PER_WORD)
1053 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1054 / UNITS_PER_WORD)
1055 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1056 [GET_MODE (SUBREG_REG (in))]))
1057 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1058 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1059 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1060 SUBREG_REG (in))
1061 == NO_REGS))
1062 #ifdef CANNOT_CHANGE_MODE_CLASS
1063 || (REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1065 && REG_CANNOT_CHANGE_MODE_P
1066 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1067 #endif
1070 #ifdef LIMIT_RELOAD_CLASS
1071 in_subreg_loc = inloc;
1072 #endif
1073 inloc = &SUBREG_REG (in);
1074 in = *inloc;
1075 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1076 if (MEM_P (in))
1077 /* This is supposed to happen only for paradoxical subregs made by
1078 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1079 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1080 #endif
1081 inmode = GET_MODE (in);
1084 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1085 where M1 is not valid for R if it was not handled by the code above.
1087 Similar issue for (SUBREG constant ...) if it was not handled by the
1088 code above. This can happen if SUBREG_BYTE != 0.
1090 However, we must reload the inner reg *as well as* the subreg in
1091 that case. */
1093 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1095 enum reg_class in_class = rclass;
1097 if (REG_P (SUBREG_REG (in)))
1098 in_class
1099 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1100 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1101 GET_MODE (SUBREG_REG (in)),
1102 SUBREG_BYTE (in),
1103 GET_MODE (in)),
1104 REGNO (SUBREG_REG (in)));
1106 /* This relies on the fact that emit_reload_insns outputs the
1107 instructions for input reloads of type RELOAD_OTHER in the same
1108 order as the reloads. Thus if the outer reload is also of type
1109 RELOAD_OTHER, we are guaranteed that this inner reload will be
1110 output before the outer reload. */
1111 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1112 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1113 dont_remove_subreg = 1;
1116 /* Similarly for paradoxical and problematical SUBREGs on the output.
1117 Note that there is no reason we need worry about the previous value
1118 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1119 entitled to clobber it all (except in the case of a word mode subreg
1120 or of a STRICT_LOW_PART, in that latter case the constraint should
1121 label it input-output.) */
1122 if (out != 0 && GET_CODE (out) == SUBREG
1123 && (subreg_lowpart_p (out) || strict_low)
1124 #ifdef CANNOT_CHANGE_MODE_CLASS
1125 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1126 #endif
1127 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1128 && (CONSTANT_P (SUBREG_REG (out))
1129 || strict_low
1130 || (((REG_P (SUBREG_REG (out))
1131 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1132 || MEM_P (SUBREG_REG (out)))
1133 && ((GET_MODE_PRECISION (outmode)
1134 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1135 #ifdef WORD_REGISTER_OPERATIONS
1136 || ((GET_MODE_PRECISION (outmode)
1137 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1138 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1139 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1140 / UNITS_PER_WORD)))
1141 #endif
1143 || (REG_P (SUBREG_REG (out))
1144 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1145 /* The case of a word mode subreg
1146 is handled differently in the following statement. */
1147 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1148 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1149 > UNITS_PER_WORD))
1150 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1151 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1152 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1153 SUBREG_REG (out))
1154 == NO_REGS))
1155 #ifdef CANNOT_CHANGE_MODE_CLASS
1156 || (REG_P (SUBREG_REG (out))
1157 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1158 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1159 GET_MODE (SUBREG_REG (out)),
1160 outmode))
1161 #endif
1164 #ifdef LIMIT_RELOAD_CLASS
1165 out_subreg_loc = outloc;
1166 #endif
1167 outloc = &SUBREG_REG (out);
1168 out = *outloc;
1169 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1170 gcc_assert (!MEM_P (out)
1171 || GET_MODE_SIZE (GET_MODE (out))
1172 <= GET_MODE_SIZE (outmode));
1173 #endif
1174 outmode = GET_MODE (out);
1177 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1178 where either M1 is not valid for R or M2 is wider than a word but we
1179 only need one register to store an M2-sized quantity in R.
1181 However, we must reload the inner reg *as well as* the subreg in
1182 that case and the inner reg is an in-out reload. */
1184 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1186 enum reg_class in_out_class
1187 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1188 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1189 GET_MODE (SUBREG_REG (out)),
1190 SUBREG_BYTE (out),
1191 GET_MODE (out)),
1192 REGNO (SUBREG_REG (out)));
1194 /* This relies on the fact that emit_reload_insns outputs the
1195 instructions for output reloads of type RELOAD_OTHER in reverse
1196 order of the reloads. Thus if the outer reload is also of type
1197 RELOAD_OTHER, we are guaranteed that this inner reload will be
1198 output after the outer reload. */
1199 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1200 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1201 0, 0, opnum, RELOAD_OTHER);
1202 dont_remove_subreg = 1;
1205 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1206 if (in != 0 && out != 0 && MEM_P (out)
1207 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1208 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1209 dont_share = 1;
1211 /* If IN is a SUBREG of a hard register, make a new REG. This
1212 simplifies some of the cases below. */
1214 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1215 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1216 && ! dont_remove_subreg)
1217 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1219 /* Similarly for OUT. */
1220 if (out != 0 && GET_CODE (out) == SUBREG
1221 && REG_P (SUBREG_REG (out))
1222 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1223 && ! dont_remove_subreg)
1224 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1226 /* Narrow down the class of register wanted if that is
1227 desirable on this machine for efficiency. */
1229 reg_class_t preferred_class = rclass;
1231 if (in != 0)
1232 preferred_class = targetm.preferred_reload_class (in, rclass);
1234 /* Output reloads may need analogous treatment, different in detail. */
1235 if (out != 0)
1236 preferred_class
1237 = targetm.preferred_output_reload_class (out, preferred_class);
1239 /* Discard what the target said if we cannot do it. */
1240 if (preferred_class != NO_REGS
1241 || (optional && type == RELOAD_FOR_OUTPUT))
1242 rclass = (enum reg_class) preferred_class;
1245 /* Make sure we use a class that can handle the actual pseudo
1246 inside any subreg. For example, on the 386, QImode regs
1247 can appear within SImode subregs. Although GENERAL_REGS
1248 can handle SImode, QImode needs a smaller class. */
1249 #ifdef LIMIT_RELOAD_CLASS
1250 if (in_subreg_loc)
1251 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1252 else if (in != 0 && GET_CODE (in) == SUBREG)
1253 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1255 if (out_subreg_loc)
1256 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1257 if (out != 0 && GET_CODE (out) == SUBREG)
1258 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1259 #endif
1261 /* Verify that this class is at least possible for the mode that
1262 is specified. */
1263 if (this_insn_is_asm)
1265 enum machine_mode mode;
1266 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1267 mode = inmode;
1268 else
1269 mode = outmode;
1270 if (mode == VOIDmode)
1272 error_for_asm (this_insn, "cannot reload integer constant "
1273 "operand in %<asm%>");
1274 mode = word_mode;
1275 if (in != 0)
1276 inmode = word_mode;
1277 if (out != 0)
1278 outmode = word_mode;
1280 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1281 if (HARD_REGNO_MODE_OK (i, mode)
1282 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1283 break;
1284 if (i == FIRST_PSEUDO_REGISTER)
1286 error_for_asm (this_insn, "impossible register constraint "
1287 "in %<asm%>");
1288 /* Avoid further trouble with this insn. */
1289 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1290 /* We used to continue here setting class to ALL_REGS, but it triggers
1291 sanity check on i386 for:
1292 void foo(long double d)
1294 asm("" :: "a" (d));
1296 Returning zero here ought to be safe as we take care in
1297 find_reloads to not process the reloads when instruction was
1298 replaced by USE. */
1300 return 0;
1304 /* Optional output reloads are always OK even if we have no register class,
1305 since the function of these reloads is only to have spill_reg_store etc.
1306 set, so that the storing insn can be deleted later. */
1307 gcc_assert (rclass != NO_REGS
1308 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1310 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1312 if (i == n_reloads)
1314 /* See if we need a secondary reload register to move between CLASS
1315 and IN or CLASS and OUT. Get the icode and push any required reloads
1316 needed for each of them if so. */
1318 if (in != 0)
1319 secondary_in_reload
1320 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1321 &secondary_in_icode, NULL);
1322 if (out != 0 && GET_CODE (out) != SCRATCH)
1323 secondary_out_reload
1324 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1325 type, &secondary_out_icode, NULL);
1327 /* We found no existing reload suitable for re-use.
1328 So add an additional reload. */
1330 #ifdef SECONDARY_MEMORY_NEEDED
1331 /* If a memory location is needed for the copy, make one. */
1332 if (in != 0
1333 && (REG_P (in)
1334 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1335 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1336 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1337 rclass, inmode))
1338 get_secondary_mem (in, inmode, opnum, type);
1339 #endif
1341 i = n_reloads;
1342 rld[i].in = in;
1343 rld[i].out = out;
1344 rld[i].rclass = rclass;
1345 rld[i].inmode = inmode;
1346 rld[i].outmode = outmode;
1347 rld[i].reg_rtx = 0;
1348 rld[i].optional = optional;
1349 rld[i].inc = 0;
1350 rld[i].nocombine = 0;
1351 rld[i].in_reg = inloc ? *inloc : 0;
1352 rld[i].out_reg = outloc ? *outloc : 0;
1353 rld[i].opnum = opnum;
1354 rld[i].when_needed = type;
1355 rld[i].secondary_in_reload = secondary_in_reload;
1356 rld[i].secondary_out_reload = secondary_out_reload;
1357 rld[i].secondary_in_icode = secondary_in_icode;
1358 rld[i].secondary_out_icode = secondary_out_icode;
1359 rld[i].secondary_p = 0;
1361 n_reloads++;
1363 #ifdef SECONDARY_MEMORY_NEEDED
1364 if (out != 0
1365 && (REG_P (out)
1366 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1367 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1368 && SECONDARY_MEMORY_NEEDED (rclass,
1369 REGNO_REG_CLASS (reg_or_subregno (out)),
1370 outmode))
1371 get_secondary_mem (out, outmode, opnum, type);
1372 #endif
1374 else
1376 /* We are reusing an existing reload,
1377 but we may have additional information for it.
1378 For example, we may now have both IN and OUT
1379 while the old one may have just one of them. */
1381 /* The modes can be different. If they are, we want to reload in
1382 the larger mode, so that the value is valid for both modes. */
1383 if (inmode != VOIDmode
1384 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1385 rld[i].inmode = inmode;
1386 if (outmode != VOIDmode
1387 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1388 rld[i].outmode = outmode;
1389 if (in != 0)
1391 rtx in_reg = inloc ? *inloc : 0;
1392 /* If we merge reloads for two distinct rtl expressions that
1393 are identical in content, there might be duplicate address
1394 reloads. Remove the extra set now, so that if we later find
1395 that we can inherit this reload, we can get rid of the
1396 address reloads altogether.
1398 Do not do this if both reloads are optional since the result
1399 would be an optional reload which could potentially leave
1400 unresolved address replacements.
1402 It is not sufficient to call transfer_replacements since
1403 choose_reload_regs will remove the replacements for address
1404 reloads of inherited reloads which results in the same
1405 problem. */
1406 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1407 && ! (rld[i].optional && optional))
1409 /* We must keep the address reload with the lower operand
1410 number alive. */
1411 if (opnum > rld[i].opnum)
1413 remove_address_replacements (in);
1414 in = rld[i].in;
1415 in_reg = rld[i].in_reg;
1417 else
1418 remove_address_replacements (rld[i].in);
1420 /* When emitting reloads we don't necessarily look at the in-
1421 and outmode, but also directly at the operands (in and out).
1422 So we can't simply overwrite them with whatever we have found
1423 for this (to-be-merged) reload, we have to "merge" that too.
1424 Reusing another reload already verified that we deal with the
1425 same operands, just possibly in different modes. So we
1426 overwrite the operands only when the new mode is larger.
1427 See also PR33613. */
1428 if (!rld[i].in
1429 || GET_MODE_SIZE (GET_MODE (in))
1430 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1431 rld[i].in = in;
1432 if (!rld[i].in_reg
1433 || (in_reg
1434 && GET_MODE_SIZE (GET_MODE (in_reg))
1435 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1436 rld[i].in_reg = in_reg;
1438 if (out != 0)
1440 if (!rld[i].out
1441 || (out
1442 && GET_MODE_SIZE (GET_MODE (out))
1443 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1444 rld[i].out = out;
1445 if (outloc
1446 && (!rld[i].out_reg
1447 || GET_MODE_SIZE (GET_MODE (*outloc))
1448 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1449 rld[i].out_reg = *outloc;
1451 if (reg_class_subset_p (rclass, rld[i].rclass))
1452 rld[i].rclass = rclass;
1453 rld[i].optional &= optional;
1454 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1455 opnum, rld[i].opnum))
1456 rld[i].when_needed = RELOAD_OTHER;
1457 rld[i].opnum = MIN (rld[i].opnum, opnum);
1460 /* If the ostensible rtx being reloaded differs from the rtx found
1461 in the location to substitute, this reload is not safe to combine
1462 because we cannot reliably tell whether it appears in the insn. */
1464 if (in != 0 && in != *inloc)
1465 rld[i].nocombine = 1;
1467 #if 0
1468 /* This was replaced by changes in find_reloads_address_1 and the new
1469 function inc_for_reload, which go with a new meaning of reload_inc. */
1471 /* If this is an IN/OUT reload in an insn that sets the CC,
1472 it must be for an autoincrement. It doesn't work to store
1473 the incremented value after the insn because that would clobber the CC.
1474 So we must do the increment of the value reloaded from,
1475 increment it, store it back, then decrement again. */
1476 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1478 out = 0;
1479 rld[i].out = 0;
1480 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1481 /* If we did not find a nonzero amount-to-increment-by,
1482 that contradicts the belief that IN is being incremented
1483 in an address in this insn. */
1484 gcc_assert (rld[i].inc != 0);
1486 #endif
1488 /* If we will replace IN and OUT with the reload-reg,
1489 record where they are located so that substitution need
1490 not do a tree walk. */
1492 if (replace_reloads)
1494 if (inloc != 0)
1496 struct replacement *r = &replacements[n_replacements++];
1497 r->what = i;
1498 r->where = inloc;
1499 r->mode = inmode;
1501 if (outloc != 0 && outloc != inloc)
1503 struct replacement *r = &replacements[n_replacements++];
1504 r->what = i;
1505 r->where = outloc;
1506 r->mode = outmode;
1510 /* If this reload is just being introduced and it has both
1511 an incoming quantity and an outgoing quantity that are
1512 supposed to be made to match, see if either one of the two
1513 can serve as the place to reload into.
1515 If one of them is acceptable, set rld[i].reg_rtx
1516 to that one. */
1518 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1520 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1521 inmode, outmode,
1522 rld[i].rclass, i,
1523 earlyclobber_operand_p (out));
1525 /* If the outgoing register already contains the same value
1526 as the incoming one, we can dispense with loading it.
1527 The easiest way to tell the caller that is to give a phony
1528 value for the incoming operand (same as outgoing one). */
1529 if (rld[i].reg_rtx == out
1530 && (REG_P (in) || CONSTANT_P (in))
1531 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1532 static_reload_reg_p, i, inmode))
1533 rld[i].in = out;
1536 /* If this is an input reload and the operand contains a register that
1537 dies in this insn and is used nowhere else, see if it is the right class
1538 to be used for this reload. Use it if so. (This occurs most commonly
1539 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1540 this if it is also an output reload that mentions the register unless
1541 the output is a SUBREG that clobbers an entire register.
1543 Note that the operand might be one of the spill regs, if it is a
1544 pseudo reg and we are in a block where spilling has not taken place.
1545 But if there is no spilling in this block, that is OK.
1546 An explicitly used hard reg cannot be a spill reg. */
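/* For example, when IN is a paradoxical (subreg:DI (reg:SI 1) 0) and hard
reg 1 has a REG_DEAD note on this insn, is of the required class, is wide
enough for the DImode access, and is not set elsewhere in the pattern,
the dying register itself may be picked as the reload register below, so
no separate spill register is needed.  */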
1548 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1550 rtx note;
1551 int regno;
1552 enum machine_mode rel_mode = inmode;
1554 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1555 rel_mode = outmode;
1557 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1558 if (REG_NOTE_KIND (note) == REG_DEAD
1559 && REG_P (XEXP (note, 0))
1560 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1561 && reg_mentioned_p (XEXP (note, 0), in)
1562 /* Check that a former pseudo is valid; see find_dummy_reload. */
1563 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1564 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1565 ORIGINAL_REGNO (XEXP (note, 0)))
1566 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1567 && ! refers_to_regno_for_reload_p (regno,
1568 end_hard_regno (rel_mode,
1569 regno),
1570 PATTERN (this_insn), inloc)
1571 /* If this is also an output reload, IN cannot be used as
1572 the reload register if it is set in this insn unless IN
1573 is also OUT. */
1574 && (out == 0 || in == out
1575 || ! hard_reg_set_here_p (regno,
1576 end_hard_regno (rel_mode, regno),
1577 PATTERN (this_insn)))
1578 /* ??? Why is this code so different from the previous?
1579 Is there any simple coherent way to describe the two together?
1580 What's going on here? */
1581 && (in != out
1582 || (GET_CODE (in) == SUBREG
1583 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1584 / UNITS_PER_WORD)
1585 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1586 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1587 /* Make sure the operand fits in the reg that dies. */
1588 && (GET_MODE_SIZE (rel_mode)
1589 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1590 && HARD_REGNO_MODE_OK (regno, inmode)
1591 && HARD_REGNO_MODE_OK (regno, outmode))
1593 unsigned int offs;
1594 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1595 hard_regno_nregs[regno][outmode]);
1597 for (offs = 0; offs < nregs; offs++)
1598 if (fixed_regs[regno + offs]
1599 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1600 regno + offs))
1601 break;
1603 if (offs == nregs
1604 && (! (refers_to_regno_for_reload_p
1605 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1606 || can_reload_into (in, regno, inmode)))
1608 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1609 break;
1614 if (out)
1615 output_reloadnum = i;
1617 return i;
1620 /* Record an additional place we must replace a value
1621 for which we have already recorded a reload.
1622 RELOADNUM is the value returned by push_reload
1623 when the reload was recorded.
1624 This is used in insn patterns that use match_dup. */
1626 static void
1627 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1629 if (replace_reloads)
1631 struct replacement *r = &replacements[n_replacements++];
1632 r->what = reloadnum;
1633 r->where = loc;
1634 r->mode = mode;
1638 /* Duplicate any replacement we have recorded to apply at
1639 location ORIG_LOC to also be performed at DUP_LOC.
1640 This is used in insn patterns that use match_dup. */
1642 static void
1643 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1645 int i, n = n_replacements;
1647 for (i = 0; i < n; i++)
1649 struct replacement *r = &replacements[i];
1650 if (r->where == orig_loc)
1651 push_replacement (dup_loc, r->what, r->mode);
1655 /* Transfer all replacements that used to be in reload FROM to be in
1656 reload TO. */
1658 void
1659 transfer_replacements (int to, int from)
1661 int i;
1663 for (i = 0; i < n_replacements; i++)
1664 if (replacements[i].what == from)
1665 replacements[i].what = to;
1668 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1669 or a subpart of it. If we have any replacements registered for IN_RTX,
1670 cancel the reloads that were supposed to load them.
1671 Return nonzero if we canceled any reloads. */
1672 int
1673 remove_address_replacements (rtx in_rtx)
1675 int i, j;
1676 char reload_flags[MAX_RELOADS];
1677 int something_changed = 0;
1679 memset (reload_flags, 0, sizeof reload_flags);
1680 for (i = 0, j = 0; i < n_replacements; i++)
1682 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1683 reload_flags[replacements[i].what] |= 1;
1684 else
1686 replacements[j++] = replacements[i];
1687 reload_flags[replacements[i].what] |= 2;
1690 /* Note that the following store must be done before the recursive calls. */
1691 n_replacements = j;
1693 for (i = n_reloads - 1; i >= 0; i--)
1695 if (reload_flags[i] == 1)
1697 deallocate_reload_reg (i);
1698 remove_address_replacements (rld[i].in);
1699 rld[i].in = 0;
1700 something_changed = 1;
1703 return something_changed;
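/* For instance, if reload inheritance makes the reload of IN_RTX
unnecessary, any reload whose only recorded replacements lie inside
IN_RTX (typically an address reload feeding it) is deallocated above,
its rld[i].in is cleared, and the function reports that something
changed.  */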
1706 /* If there is only one output reload, and it is not for an earlyclobber
1707 operand, try to combine it with a (logically unrelated) input reload
1708 to reduce the number of reload registers needed.
1710 This is safe if the input reload does not appear in
1711 the value being output-reloaded, because this implies
1712 it is not needed any more once the original insn completes.
1714 If that doesn't work, see if we can use any of the registers that
1715 die in this insn as a reload register. We can if it is of the right
1716 class and does not appear in the value being output-reloaded. */
1718 static void
1719 combine_reloads (void)
1721 int i, regno;
1722 int output_reload = -1;
1723 int secondary_out = -1;
1724 rtx note;
1726 /* Find the output reload; return unless there is exactly one
1727 and that one is mandatory. */
1729 for (i = 0; i < n_reloads; i++)
1730 if (rld[i].out != 0)
1732 if (output_reload >= 0)
1733 return;
1734 output_reload = i;
1737 if (output_reload < 0 || rld[output_reload].optional)
1738 return;
1740 /* An input-output reload isn't combinable. */
1742 if (rld[output_reload].in != 0)
1743 return;
1745 /* If this reload is for an earlyclobber operand, we can't do anything. */
1746 if (earlyclobber_operand_p (rld[output_reload].out))
1747 return;
1749 /* If there is a reload for part of the address of this operand, we would
1750 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1751 its life to the point where doing this combine would not lower the
1752 number of spill registers needed. */
1753 for (i = 0; i < n_reloads; i++)
1754 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1755 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1756 && rld[i].opnum == rld[output_reload].opnum)
1757 return;
1759 /* Check each input reload; can we combine it? */
1761 for (i = 0; i < n_reloads; i++)
1762 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1763 /* Life span of this reload must not extend past main insn. */
1764 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1765 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1766 && rld[i].when_needed != RELOAD_OTHER
1767 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1768 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1769 [(int) rld[output_reload].outmode])
1770 && rld[i].inc == 0
1771 && rld[i].reg_rtx == 0
1772 #ifdef SECONDARY_MEMORY_NEEDED
1773 /* Don't combine two reloads with different secondary
1774 memory locations. */
1775 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1776 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1777 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1778 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1779 #endif
1780 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1781 ? (rld[i].rclass == rld[output_reload].rclass)
1782 : (reg_class_subset_p (rld[i].rclass,
1783 rld[output_reload].rclass)
1784 || reg_class_subset_p (rld[output_reload].rclass,
1785 rld[i].rclass)))
1786 && (MATCHES (rld[i].in, rld[output_reload].out)
1787 /* Args reversed because the first arg seems to be
1788 the one that we imagine being modified
1789 while the second is the one that might be affected. */
1790 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1791 rld[i].in)
1792 /* However, if the input is a register that appears inside
1793 the output, then we also can't share.
1794 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1795 If the same reload reg is used for both reg 69 and the
1796 result to be stored in memory, then that result
1797 will clobber the address of the memory ref. */
1798 && ! (REG_P (rld[i].in)
1799 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1800 rld[output_reload].out))))
1801 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1802 rld[i].when_needed != RELOAD_FOR_INPUT)
1803 && (reg_class_size[(int) rld[i].rclass]
1804 || targetm.small_register_classes_for_mode_p (VOIDmode))
1805 /* We will allow making things slightly worse by combining an
1806 input and an output, but no worse than that. */
1807 && (rld[i].when_needed == RELOAD_FOR_INPUT
1808 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1810 int j;
1812 /* We have found a reload to combine with! */
1813 rld[i].out = rld[output_reload].out;
1814 rld[i].out_reg = rld[output_reload].out_reg;
1815 rld[i].outmode = rld[output_reload].outmode;
1816 /* Mark the old output reload as inoperative. */
1817 rld[output_reload].out = 0;
1818 /* The combined reload is needed for the entire insn. */
1819 rld[i].when_needed = RELOAD_OTHER;
1820 /* If the output reload had a secondary reload, copy it. */
1821 if (rld[output_reload].secondary_out_reload != -1)
1823 rld[i].secondary_out_reload
1824 = rld[output_reload].secondary_out_reload;
1825 rld[i].secondary_out_icode
1826 = rld[output_reload].secondary_out_icode;
1829 #ifdef SECONDARY_MEMORY_NEEDED
1830 /* Copy any secondary MEM. */
1831 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1832 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1833 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1834 #endif
1835 /* If required, minimize the register class. */
1836 if (reg_class_subset_p (rld[output_reload].rclass,
1837 rld[i].rclass))
1838 rld[i].rclass = rld[output_reload].rclass;
1840 /* Transfer all replacements from the old reload to the combined. */
1841 for (j = 0; j < n_replacements; j++)
1842 if (replacements[j].what == output_reload)
1843 replacements[j].what = i;
1845 return;
1848 /* If this insn has only one operand that is modified or written (assumed
1849 to be the first), it must be the one corresponding to this reload. It
1850 is safe to use anything that dies in this insn for that output provided
1851 that it does not occur in the output (we already know it isn't an
1852 earlyclobber). If this is an asm insn, give up. */
1854 if (INSN_CODE (this_insn) == -1)
1855 return;
1857 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1858 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1859 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1860 return;
1862 /* See if some hard register that dies in this insn and is not used in
1863 the output is the right class. Only works if the register we pick
1864 up can fully hold our output reload. */
1865 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1866 if (REG_NOTE_KIND (note) == REG_DEAD
1867 && REG_P (XEXP (note, 0))
1868 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1869 rld[output_reload].out)
1870 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1871 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1872 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1873 regno)
1874 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1875 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1876 /* Ensure that a secondary or tertiary reload for this output
1877 won't want this register. */
1878 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1879 || (!(TEST_HARD_REG_BIT
1880 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1881 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1882 || !(TEST_HARD_REG_BIT
1883 (reg_class_contents[(int) rld[secondary_out].rclass],
1884 regno)))))
1885 && !fixed_regs[regno]
1886 /* Check that a former pseudo is valid; see find_dummy_reload. */
1887 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1888 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1889 ORIGINAL_REGNO (XEXP (note, 0)))
1890 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1892 rld[output_reload].reg_rtx
1893 = gen_rtx_REG (rld[output_reload].outmode, regno);
1894 return;
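/* As an illustration: for an insn like
(set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4)))
where both pseudos failed to get hard registers, the input reload for
reg 71 and the output reload for reg 70 may share one reload register
(provided their classes are compatible), because the input value is no
longer needed once the sum has been stored; the combined reload is then
marked RELOAD_OTHER.  */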
1898 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1899 See if one of IN and OUT is a register that may be used;
1900 this is desirable since a spill-register won't be needed.
1901 If so, return the register rtx that proves acceptable.
1903 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1904 RCLASS is the register class required for the reload.
1906 If FOR_REAL is >= 0, it is the number of the reload,
1907 and in some cases when it can be discovered that OUT doesn't need
1908 to be computed, clear out rld[FOR_REAL].out.
1910 If FOR_REAL is -1, this should not be done, because this call
1911 is just to see if a register can be found, not to find and install it.
1913 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1914 puts an additional constraint on being able to use IN for OUT since
1915 IN must not appear elsewhere in the insn (it is assumed that IN itself
1916 is safe from the earlyclobber). */
1918 static rtx
1919 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1920 enum machine_mode inmode, enum machine_mode outmode,
1921 reg_class_t rclass, int for_real, int earlyclobber)
1923 rtx in = real_in;
1924 rtx out = real_out;
1925 int in_offset = 0;
1926 int out_offset = 0;
1927 rtx value = 0;
1929 /* If operands exceed a word, we can't use either of them
1930 unless they have the same size. */
1931 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1932 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1933 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1934 return 0;
1936 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1937 respectively refers to a hard register. */
1939 /* Find the inside of any subregs. */
1940 while (GET_CODE (out) == SUBREG)
1942 if (REG_P (SUBREG_REG (out))
1943 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1944 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1945 GET_MODE (SUBREG_REG (out)),
1946 SUBREG_BYTE (out),
1947 GET_MODE (out));
1948 out = SUBREG_REG (out);
1950 while (GET_CODE (in) == SUBREG)
1952 if (REG_P (SUBREG_REG (in))
1953 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1954 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1955 GET_MODE (SUBREG_REG (in)),
1956 SUBREG_BYTE (in),
1957 GET_MODE (in));
1958 in = SUBREG_REG (in);
1961 /* Narrow down the reg class, the same way push_reload will;
1962 otherwise we might find a dummy now, but push_reload won't. */
1964 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1965 if (preferred_class != NO_REGS)
1966 rclass = (enum reg_class) preferred_class;
1969 /* See if OUT will do. */
1970 if (REG_P (out)
1971 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1973 unsigned int regno = REGNO (out) + out_offset;
1974 unsigned int nwords = hard_regno_nregs[regno][outmode];
1975 rtx saved_rtx;
1977 /* When we consider whether the insn uses OUT,
1978 ignore references within IN. They don't prevent us
1979 from copying IN into OUT, because those refs would
1980 move into the insn that reloads IN.
1982 However, we only ignore IN in its role as this reload.
1983 If the insn uses IN elsewhere and it contains OUT,
1984 that counts. We can't be sure it's the "same" operand
1985 so it might not go through this reload. */
1986 saved_rtx = *inloc;
1987 *inloc = const0_rtx;
1989 if (regno < FIRST_PSEUDO_REGISTER
1990 && HARD_REGNO_MODE_OK (regno, outmode)
1991 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1992 PATTERN (this_insn), outloc))
1994 unsigned int i;
1996 for (i = 0; i < nwords; i++)
1997 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1998 regno + i))
1999 break;
2001 if (i == nwords)
2003 if (REG_P (real_out))
2004 value = real_out;
2005 else
2006 value = gen_rtx_REG (outmode, regno);
2010 *inloc = saved_rtx;
2013 /* Consider using IN if OUT was not acceptable
2014 or if OUT dies in this insn (like the quotient in a divmod insn).
2015 We can't use IN unless it dies in this insn,
2016 which means we must know accurately which hard regs are live.
2017 Also, the result can't go in IN if IN is used within OUT,
2018 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2019 if (hard_regs_live_known
2020 && REG_P (in)
2021 && REGNO (in) < FIRST_PSEUDO_REGISTER
2022 && (value == 0
2023 || find_reg_note (this_insn, REG_UNUSED, real_out))
2024 && find_reg_note (this_insn, REG_DEAD, real_in)
2025 && !fixed_regs[REGNO (in)]
2026 && HARD_REGNO_MODE_OK (REGNO (in),
2027 /* The only case where out and real_out might
2028 have different modes is where real_out
2029 is a subreg, and in that case, out
2030 has a real mode. */
2031 (GET_MODE (out) != VOIDmode
2032 ? GET_MODE (out) : outmode))
2033 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2034 /* However only do this if we can be sure that this input
2035 operand doesn't correspond with an uninitialized pseudo.
2036 global can assign some hardreg to it that is the same as
2037 the one assigned to a different, also live pseudo (as it
2038 can ignore the conflict). We must never introduce writes
2039 to such hardregs, as they would clobber the other live
2040 pseudo. See PR 20973. */
2041 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2042 ORIGINAL_REGNO (in))
2043 /* Similarly, only do this if we can be sure that the death
2044 note is still valid. global can assign some hardreg to
2045 the pseudo referenced in the note and simultaneously a
2046 subword of this hardreg to a different, also live pseudo,
2047 because only another subword of the hardreg is actually
2048 used in the insn. This cannot happen if the pseudo has
2049 been assigned exactly one hardreg. See PR 33732. */
2050 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2052 unsigned int regno = REGNO (in) + in_offset;
2053 unsigned int nwords = hard_regno_nregs[regno][inmode];
2055 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2056 && ! hard_reg_set_here_p (regno, regno + nwords,
2057 PATTERN (this_insn))
2058 && (! earlyclobber
2059 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2060 PATTERN (this_insn), inloc)))
2062 unsigned int i;
2064 for (i = 0; i < nwords; i++)
2065 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2066 regno + i))
2067 break;
2069 if (i == nwords)
2071 /* If we were going to use OUT as the reload reg
2072 and changed our mind, it means OUT is a dummy that
2073 dies here. So don't bother copying value to it. */
2074 if (for_real >= 0 && value == real_out)
2075 rld[for_real].out = 0;
2076 if (REG_P (real_in))
2077 value = real_in;
2078 else
2079 value = gen_rtx_REG (inmode, regno);
2084 return value;
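/* Typical use: two operands tied by a matching constraint, say
IN == (reg:SI 71) and OUT == (reg:SI 3), where hard reg 3 is in the
required class and is not referenced anywhere else in the insn.  OUT
itself is then acceptable, so the value can be loaded straight into
reg 3 and no separate reload register is required.  */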
2087 /* This page contains subroutines used mainly for determining
2088 whether the IN or an OUT of a reload can serve as the
2089 reload register. */
2091 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2093 int
2094 earlyclobber_operand_p (rtx x)
2096 int i;
2098 for (i = 0; i < n_earlyclobbers; i++)
2099 if (reload_earlyclobbers[i] == x)
2100 return 1;
2102 return 0;
2105 /* Return 1 if expression X alters a hard reg in the range
2106 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2107 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2108 X should be the body of an instruction. */
2110 static int
2111 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2113 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2115 rtx op0 = SET_DEST (x);
2117 while (GET_CODE (op0) == SUBREG)
2118 op0 = SUBREG_REG (op0);
2119 if (REG_P (op0))
2121 unsigned int r = REGNO (op0);
2123 /* See if this reg overlaps range under consideration. */
2124 if (r < end_regno
2125 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2126 return 1;
2129 else if (GET_CODE (x) == PARALLEL)
2131 int i = XVECLEN (x, 0) - 1;
2133 for (; i >= 0; i--)
2134 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2135 return 1;
2138 return 0;
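/* Example of the overlap test above: with BEG_REGNO == 2 and END_REGNO == 4
(a value occupying hard regs 2 and 3), a (set (reg:SI 3) ...) in the body
satisfies r < end_regno and end_hard_regno (SImode, 3) > beg_regno, so the
function returns 1; a store to reg 4 would not overlap (assuming SImode
occupies a single hard register).  */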
2141 /* Return 1 if ADDR is a valid memory address for mode MODE
2142 in address space AS, and check that each pseudo reg has the
2143 proper kind of hard reg. */
2145 int
2146 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2147 rtx addr, addr_space_t as)
2149 #ifdef GO_IF_LEGITIMATE_ADDRESS
2150 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2151 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2152 return 0;
2154 win:
2155 return 1;
2156 #else
2157 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2158 #endif
2161 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2162 if they are the same hard reg, and has special hacks for
2163 autoincrement and autodecrement.
2164 This is specifically intended for find_reloads to use
2165 in determining whether two operands match.
2166 X is the operand whose number is the lower of the two.
2168 The value is 2 if Y contains a pre-increment that matches
2169 a non-incrementing address in X. */
2171 /* ??? To be completely correct, we should arrange to pass
2172 for X the output operand and for Y the input operand.
2173 For now, we assume that the output operand has the lower number
2174 because that is natural in (SET output (... input ...)). */
2176 int
2177 operands_match_p (rtx x, rtx y)
2179 int i;
2180 RTX_CODE code = GET_CODE (x);
2181 const char *fmt;
2182 int success_2;
2184 if (x == y)
2185 return 1;
2186 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2187 && (REG_P (y) || (GET_CODE (y) == SUBREG
2188 && REG_P (SUBREG_REG (y)))))
2190 int j;
2192 if (code == SUBREG)
2194 i = REGNO (SUBREG_REG (x));
2195 if (i >= FIRST_PSEUDO_REGISTER)
2196 goto slow;
2197 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2198 GET_MODE (SUBREG_REG (x)),
2199 SUBREG_BYTE (x),
2200 GET_MODE (x));
2202 else
2203 i = REGNO (x);
2205 if (GET_CODE (y) == SUBREG)
2207 j = REGNO (SUBREG_REG (y));
2208 if (j >= FIRST_PSEUDO_REGISTER)
2209 goto slow;
2210 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2211 GET_MODE (SUBREG_REG (y)),
2212 SUBREG_BYTE (y),
2213 GET_MODE (y));
2215 else
2216 j = REGNO (y);
2218 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2219 multiple hard register group of scalar integer registers, so that
2220 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2221 register. */
2222 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2223 && SCALAR_INT_MODE_P (GET_MODE (x))
2224 && i < FIRST_PSEUDO_REGISTER)
2225 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2226 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2227 && SCALAR_INT_MODE_P (GET_MODE (y))
2228 && j < FIRST_PSEUDO_REGISTER)
2229 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2231 return i == j;
2233 /* If two operands must match, because they are really a single
2234 operand of an assembler insn, then two postincrements are invalid
2235 because the assembler insn would increment only once.
2236 On the other hand, a postincrement matches ordinary indexing
2237 if the postincrement is the output operand. */
2238 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2239 return operands_match_p (XEXP (x, 0), y);
2240 /* Two preincrements are invalid
2241 because the assembler insn would increment only once.
2242 On the other hand, a preincrement matches ordinary indexing
2243 if the preincrement is the input operand.
2244 In this case, return 2, since some callers need to do special
2245 things when this happens. */
2246 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2247 || GET_CODE (y) == PRE_MODIFY)
2248 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2250 slow:
2252 /* Now we have disposed of all the cases in which different rtx codes
2253 can match. */
2254 if (code != GET_CODE (y))
2255 return 0;
2257 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2258 if (GET_MODE (x) != GET_MODE (y))
2259 return 0;
2261 /* MEMs referring to different address spaces are not equivalent. */
2262 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2263 return 0;
2265 switch (code)
2267 case CONST_INT:
2268 case CONST_DOUBLE:
2269 case CONST_FIXED:
2270 return 0;
2272 case LABEL_REF:
2273 return XEXP (x, 0) == XEXP (y, 0);
2274 case SYMBOL_REF:
2275 return XSTR (x, 0) == XSTR (y, 0);
2277 default:
2278 break;
2281 /* Compare the elements. If any pair of corresponding elements
2282 fails to match, return 0 for the whole thing. */
2284 success_2 = 0;
2285 fmt = GET_RTX_FORMAT (code);
2286 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2288 int val, j;
2289 switch (fmt[i])
2291 case 'w':
2292 if (XWINT (x, i) != XWINT (y, i))
2293 return 0;
2294 break;
2296 case 'i':
2297 if (XINT (x, i) != XINT (y, i))
2298 return 0;
2299 break;
2301 case 'e':
2302 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2303 if (val == 0)
2304 return 0;
2305 /* If any subexpression returns 2,
2306 we should return 2 if we are successful. */
2307 if (val == 2)
2308 success_2 = 1;
2309 break;
2311 case '0':
2312 break;
2314 case 'E':
2315 if (XVECLEN (x, i) != XVECLEN (y, i))
2316 return 0;
2317 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2319 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2320 if (val == 0)
2321 return 0;
2322 if (val == 2)
2323 success_2 = 1;
2325 break;
2327 /* It is believed that rtx's at this level will never
2328 contain anything but integers and other rtx's,
2329 except for within LABEL_REFs and SYMBOL_REFs. */
2330 default:
2331 gcc_unreachable ();
2334 return 1 + success_2;
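/* Example of the return value 2: matching X == (mem:SI (reg 3)) against
Y == (mem:SI (pre_inc:SI (reg 3))) recurses into the addresses, where the
PRE_INC in Y matches the plain (reg 3) in X and yields 2; the MEM level
then records that through success_2 and returns 2 as well.  */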
2337 /* Describe the range of registers or memory referenced by X.
2338 If X is a register, set REG_FLAG and put the first register
2339 number into START and the last plus one into END.
2340 If X is a memory reference, put a base address into BASE
2341 and a range of integer offsets into START and END.
2342 If X is pushing on the stack, we can assume it causes no trouble,
2343 so we set the SAFE field. */
2345 static struct decomposition
2346 decompose (rtx x)
2348 struct decomposition val;
2349 int all_const = 0;
2351 memset (&val, 0, sizeof (val));
2353 switch (GET_CODE (x))
2355 case MEM:
2357 rtx base = NULL_RTX, offset = 0;
2358 rtx addr = XEXP (x, 0);
2360 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2361 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2363 val.base = XEXP (addr, 0);
2364 val.start = -GET_MODE_SIZE (GET_MODE (x));
2365 val.end = GET_MODE_SIZE (GET_MODE (x));
2366 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2367 return val;
2370 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2372 if (GET_CODE (XEXP (addr, 1)) == PLUS
2373 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2374 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2376 val.base = XEXP (addr, 0);
2377 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2378 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2379 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2380 return val;
2384 if (GET_CODE (addr) == CONST)
2386 addr = XEXP (addr, 0);
2387 all_const = 1;
2389 if (GET_CODE (addr) == PLUS)
2391 if (CONSTANT_P (XEXP (addr, 0)))
2393 base = XEXP (addr, 1);
2394 offset = XEXP (addr, 0);
2396 else if (CONSTANT_P (XEXP (addr, 1)))
2398 base = XEXP (addr, 0);
2399 offset = XEXP (addr, 1);
2403 if (offset == 0)
2405 base = addr;
2406 offset = const0_rtx;
2408 if (GET_CODE (offset) == CONST)
2409 offset = XEXP (offset, 0);
2410 if (GET_CODE (offset) == PLUS)
2412 if (CONST_INT_P (XEXP (offset, 0)))
2414 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2415 offset = XEXP (offset, 0);
2417 else if (CONST_INT_P (XEXP (offset, 1)))
2419 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2420 offset = XEXP (offset, 1);
2422 else
2424 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2425 offset = const0_rtx;
2428 else if (!CONST_INT_P (offset))
2430 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2431 offset = const0_rtx;
2434 if (all_const && GET_CODE (base) == PLUS)
2435 base = gen_rtx_CONST (GET_MODE (base), base);
2437 gcc_assert (CONST_INT_P (offset));
2439 val.start = INTVAL (offset);
2440 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2441 val.base = base;
2443 break;
2445 case REG:
2446 val.reg_flag = 1;
2447 val.start = true_regnum (x);
2448 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2450 /* A pseudo with no hard reg. */
2451 val.start = REGNO (x);
2452 val.end = val.start + 1;
2454 else
2455 /* A hard reg. */
2456 val.end = end_hard_regno (GET_MODE (x), val.start);
2457 break;
2459 case SUBREG:
2460 if (!REG_P (SUBREG_REG (x)))
2461 /* This could be more precise, but it's good enough. */
2462 return decompose (SUBREG_REG (x));
2463 val.reg_flag = 1;
2464 val.start = true_regnum (x);
2465 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2466 return decompose (SUBREG_REG (x));
2467 else
2468 /* A hard reg. */
2469 val.end = val.start + subreg_nregs (x);
2470 break;
2472 case SCRATCH:
2473 /* This hasn't been assigned yet, so it can't conflict yet. */
2474 val.safe = 1;
2475 break;
2477 default:
2478 gcc_assert (CONSTANT_P (x));
2479 val.safe = 1;
2480 break;
2482 return val;
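/* Example decompositions: (mem:SI (plus (reg fp) (const_int 8))) yields
base == the frame pointer rtx, start == 8 and end == 12 for a 4-byte
SImode access, with reg_flag and safe both clear; a hard register such
as (reg:DI 2), on a target where DImode needs two word registers, yields
reg_flag == 1, start == 2 and end == 4.  */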
2485 /* Return 1 if altering Y will not modify the value of X.
2486 Y is also described by YDATA, which should be decompose (Y). */
2488 static int
2489 immune_p (rtx x, rtx y, struct decomposition ydata)
2491 struct decomposition xdata;
2493 if (ydata.reg_flag)
2494 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2495 if (ydata.safe)
2496 return 1;
2498 gcc_assert (MEM_P (y));
2499 /* If Y is memory and X is not, Y can't affect X. */
2500 if (!MEM_P (x))
2501 return 1;
2503 xdata = decompose (x);
2505 if (! rtx_equal_p (xdata.base, ydata.base))
2507 /* If bases are distinct symbolic constants, there is no overlap. */
2508 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2509 return 1;
2510 /* Constants and stack slots never overlap. */
2511 if (CONSTANT_P (xdata.base)
2512 && (ydata.base == frame_pointer_rtx
2513 || ydata.base == hard_frame_pointer_rtx
2514 || ydata.base == stack_pointer_rtx))
2515 return 1;
2516 if (CONSTANT_P (ydata.base)
2517 && (xdata.base == frame_pointer_rtx
2518 || xdata.base == hard_frame_pointer_rtx
2519 || xdata.base == stack_pointer_rtx))
2520 return 1;
2521 /* If either base is variable, we don't know anything. */
2522 return 0;
2525 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
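/* Example: two frame-pointer slots such as
(mem:SI (plus (reg fp) (const_int -8))) and
(mem:SI (plus (reg fp) (const_int -4))) decompose to the ranges
[-8, -4) and [-4, 0) off the same base, which do not overlap, so
immune_p reports that altering one cannot change the other.  */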
2528 /* Similar, but calls decompose. */
2530 int
2531 safe_from_earlyclobber (rtx op, rtx clobber)
2533 struct decomposition early_data;
2535 early_data = decompose (clobber);
2536 return immune_p (op, clobber, early_data);
2539 /* Main entry point of this file: search the body of INSN
2540 for values that need reloading and record them with push_reload.
2541 REPLACE nonzero means record also where the values occur
2542 so that subst_reloads can be used.
2544 IND_LEVELS says how many levels of indirection are supported by this
2545 machine; a value of zero means that a memory reference is not a valid
2546 memory address.
2548 LIVE_KNOWN says we have valid information about which hard
2549 regs are live at each point in the program; this is true when
2550 we are called from global_alloc but false when stupid register
2551 allocation has been done.
2553 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2554 which is nonnegative if the reg has been commandeered for reloading into.
2555 It is copied into STATIC_RELOAD_REG_P and referenced from there
2556 by various subroutines.
2558 Return TRUE if some operands need to be changed, because of swapping
2559 commutative operands, reg_equiv_address substitution, or whatever. */
2561 int
2562 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2563 short *reload_reg_p)
2565 int insn_code_number;
2566 int i, j;
2567 int noperands;
2568 /* These start out as the constraints for the insn
2569 and they are chewed up as we consider alternatives. */
2570 const char *constraints[MAX_RECOG_OPERANDS];
2571 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2572 a register. */
2573 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2574 char pref_or_nothing[MAX_RECOG_OPERANDS];
2575 /* Nonzero for a MEM operand whose entire address needs a reload.
2576 May be -1 to indicate the entire address may or may not need a reload. */
2577 int address_reloaded[MAX_RECOG_OPERANDS];
2578 /* Nonzero for an address operand that needs to be completely reloaded.
2579 May be -1 to indicate the entire operand may or may not need a reload. */
2580 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2581 /* Value of enum reload_type to use for operand. */
2582 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2583 /* Value of enum reload_type to use within address of operand. */
2584 enum reload_type address_type[MAX_RECOG_OPERANDS];
2585 /* Save the usage of each operand. */
2586 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2587 int no_input_reloads = 0, no_output_reloads = 0;
2588 int n_alternatives;
2589 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2590 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2591 char this_alternative_win[MAX_RECOG_OPERANDS];
2592 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2593 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2594 int this_alternative_matches[MAX_RECOG_OPERANDS];
2595 int swapped;
2596 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2597 int this_alternative_number;
2598 int goal_alternative_number = 0;
2599 int operand_reloadnum[MAX_RECOG_OPERANDS];
2600 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2601 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2602 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2603 char goal_alternative_win[MAX_RECOG_OPERANDS];
2604 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2605 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2606 int goal_alternative_swapped;
2607 int best;
2608 int commutative;
2609 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2610 rtx substed_operand[MAX_RECOG_OPERANDS];
2611 rtx body = PATTERN (insn);
2612 rtx set = single_set (insn);
2613 int goal_earlyclobber = 0, this_earlyclobber;
2614 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2615 int retval = 0;
2617 this_insn = insn;
2618 n_reloads = 0;
2619 n_replacements = 0;
2620 n_earlyclobbers = 0;
2621 replace_reloads = replace;
2622 hard_regs_live_known = live_known;
2623 static_reload_reg_p = reload_reg_p;
2625 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2626 neither are insns that SET cc0. Insns that use CC0 are not allowed
2627 to have any input reloads. */
2628 if (JUMP_P (insn) || CALL_P (insn))
2629 no_output_reloads = 1;
2631 #ifdef HAVE_cc0
2632 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2633 no_input_reloads = 1;
2634 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2635 no_output_reloads = 1;
2636 #endif
2638 #ifdef SECONDARY_MEMORY_NEEDED
2639 /* The eliminated forms of any secondary memory locations are per-insn, so
2640 clear them out here. */
2642 if (secondary_memlocs_elim_used)
2644 memset (secondary_memlocs_elim, 0,
2645 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2646 secondary_memlocs_elim_used = 0;
2648 #endif
2650 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2651 is cheap to move between them. If it is not, there may not be an insn
2652 to do the copy, so we may need a reload. */
2653 if (GET_CODE (body) == SET
2654 && REG_P (SET_DEST (body))
2655 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2656 && REG_P (SET_SRC (body))
2657 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2658 && register_move_cost (GET_MODE (SET_SRC (body)),
2659 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2660 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2661 return 0;
2663 extract_insn (insn);
2665 noperands = reload_n_operands = recog_data.n_operands;
2666 n_alternatives = recog_data.n_alternatives;
2668 /* Just return "no reloads" if insn has no operands with constraints. */
2669 if (noperands == 0 || n_alternatives == 0)
2670 return 0;
2672 insn_code_number = INSN_CODE (insn);
2673 this_insn_is_asm = insn_code_number < 0;
2675 memcpy (operand_mode, recog_data.operand_mode,
2676 noperands * sizeof (enum machine_mode));
2677 memcpy (constraints, recog_data.constraints,
2678 noperands * sizeof (const char *));
2680 commutative = -1;
2682 /* If we will need to know, later, whether some pair of operands
2683 are the same, we must compare them now and save the result.
2684 Reloading the base and index registers will clobber them
2685 and afterward they will fail to match. */
2687 for (i = 0; i < noperands; i++)
2689 const char *p;
2690 int c;
2691 char *end;
2693 substed_operand[i] = recog_data.operand[i];
2694 p = constraints[i];
2696 modified[i] = RELOAD_READ;
2698 /* Scan this operand's constraint to see if it is an output operand,
2699 an in-out operand, is commutative, or should match another. */
2701 while ((c = *p))
2703 p += CONSTRAINT_LEN (c, p);
2704 switch (c)
2706 case '=':
2707 modified[i] = RELOAD_WRITE;
2708 break;
2709 case '+':
2710 modified[i] = RELOAD_READ_WRITE;
2711 break;
2712 case '%':
2714 /* The last operand should not be marked commutative. */
2715 gcc_assert (i != noperands - 1);
2717 /* We currently only support one commutative pair of
2718 operands. Some existing asm code currently uses more
2719 than one pair. Previously, that would usually work,
2720 but sometimes it would crash the compiler. We
2721 continue supporting that case as well as we can by
2722 silently ignoring all but the first pair. In the
2723 future we may handle it correctly. */
2724 if (commutative < 0)
2725 commutative = i;
2726 else
2727 gcc_assert (this_insn_is_asm);
2729 break;
2730 /* Use of ISDIGIT is tempting here, but it may get expensive because
2731 of locale support we don't want. */
2732 case '0': case '1': case '2': case '3': case '4':
2733 case '5': case '6': case '7': case '8': case '9':
2735 c = strtoul (p - 1, &end, 10);
2736 p = end;
2738 operands_match[c][i]
2739 = operands_match_p (recog_data.operand[c],
2740 recog_data.operand[i]);
2742 /* An operand may not match itself. */
2743 gcc_assert (c != i);
2745 /* If C can be commuted with C+1, and C might need to match I,
2746 then C+1 might also need to match I. */
2747 if (commutative >= 0)
2749 if (c == commutative || c == commutative + 1)
2751 int other = c + (c == commutative ? 1 : -1);
2752 operands_match[other][i]
2753 = operands_match_p (recog_data.operand[other],
2754 recog_data.operand[i]);
2756 if (i == commutative || i == commutative + 1)
2758 int other = i + (i == commutative ? 1 : -1);
2759 operands_match[c][other]
2760 = operands_match_p (recog_data.operand[c],
2761 recog_data.operand[other]);
2763 /* Note that C is supposed to be less than I.
2764 No need to consider altering both C and I because in
2765 that case we would alter one into the other. */
2772 /* Examine each operand that is a memory reference or memory address
2773 and reload parts of the addresses into index registers.
2774 Also here any references to pseudo regs that didn't get hard regs
2775 but are equivalent to constants get replaced in the insn itself
2776 with those constants. Nobody will ever see them again.
2778 Finally, set up the preferred classes of each operand. */
2780 for (i = 0; i < noperands; i++)
2782 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2784 address_reloaded[i] = 0;
2785 address_operand_reloaded[i] = 0;
2786 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2787 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2788 : RELOAD_OTHER);
2789 address_type[i]
2790 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2791 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2792 : RELOAD_OTHER);
2794 if (*constraints[i] == 0)
2795 /* Ignore things like match_operator operands. */
2796 ;
2797 else if (constraints[i][0] == 'p'
2798 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2800 address_operand_reloaded[i]
2801 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2802 recog_data.operand[i],
2803 recog_data.operand_loc[i],
2804 i, operand_type[i], ind_levels, insn);
2806 /* If we now have a simple operand where we used to have a
2807 PLUS or MULT, re-recognize and try again. */
2808 if ((OBJECT_P (*recog_data.operand_loc[i])
2809 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2810 && (GET_CODE (recog_data.operand[i]) == MULT
2811 || GET_CODE (recog_data.operand[i]) == PLUS))
2813 INSN_CODE (insn) = -1;
2814 retval = find_reloads (insn, replace, ind_levels, live_known,
2815 reload_reg_p);
2816 return retval;
2819 recog_data.operand[i] = *recog_data.operand_loc[i];
2820 substed_operand[i] = recog_data.operand[i];
2822 /* Address operands are reloaded in their existing mode,
2823 no matter what is specified in the machine description. */
2824 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2826 /* If the address is a single CONST_INT, pick the address mode
2827 instead; otherwise we will later not know in which mode
2828 the reload should be performed. */
2829 if (operand_mode[i] == VOIDmode)
2830 operand_mode[i] = Pmode;
2833 else if (code == MEM)
2835 address_reloaded[i]
2836 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2837 recog_data.operand_loc[i],
2838 XEXP (recog_data.operand[i], 0),
2839 &XEXP (recog_data.operand[i], 0),
2840 i, address_type[i], ind_levels, insn);
2841 recog_data.operand[i] = *recog_data.operand_loc[i];
2842 substed_operand[i] = recog_data.operand[i];
2844 else if (code == SUBREG)
2846 rtx reg = SUBREG_REG (recog_data.operand[i]);
2847 rtx op
2848 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2849 ind_levels,
2850 set != 0
2851 && &SET_DEST (set) == recog_data.operand_loc[i],
2852 insn,
2853 &address_reloaded[i]);
2855 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2856 that didn't get a hard register, emit a USE with a REG_EQUAL
2857 note in front so that we might inherit a previous, possibly
2858 wider reload. */
2860 if (replace
2861 && MEM_P (op)
2862 && REG_P (reg)
2863 && (GET_MODE_SIZE (GET_MODE (reg))
2864 >= GET_MODE_SIZE (GET_MODE (op)))
2865 && reg_equiv_constant (REGNO (reg)) == 0)
2866 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2867 insn),
2868 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2870 substed_operand[i] = recog_data.operand[i] = op;
2872 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2873 /* We can get a PLUS as an "operand" as a result of register
2874 elimination. See eliminate_regs and gen_reload. We handle
2875 a unary operator by reloading the operand. */
2876 substed_operand[i] = recog_data.operand[i]
2877 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2878 ind_levels, 0, insn,
2879 &address_reloaded[i]);
2880 else if (code == REG)
2882 /* This is equivalent to calling find_reloads_toplev.
2883 The code is duplicated for speed.
2884 When we find a pseudo always equivalent to a constant,
2885 we replace it by the constant. We must be sure, however,
2886 that we don't try to replace it in the insn in which it
2887 is being set. */
2888 int regno = REGNO (recog_data.operand[i]);
2889 if (reg_equiv_constant (regno) != 0
2890 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2892 /* Record the existing mode so that the check if constants are
2893 allowed will work when operand_mode isn't specified. */
2895 if (operand_mode[i] == VOIDmode)
2896 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2898 substed_operand[i] = recog_data.operand[i]
2899 = reg_equiv_constant (regno);
2901 if (reg_equiv_memory_loc (regno) != 0
2902 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2903 /* We need not give a valid is_set_dest argument since the case
2904 of a constant equivalence was checked above. */
2905 substed_operand[i] = recog_data.operand[i]
2906 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2907 ind_levels, 0, insn,
2908 &address_reloaded[i]);
2910 /* If the operand is still a register (we didn't replace it with an
2911 equivalent), get the preferred class to reload it into. */
2912 code = GET_CODE (recog_data.operand[i]);
2913 preferred_class[i]
2914 = ((code == REG && REGNO (recog_data.operand[i])
2915 >= FIRST_PSEUDO_REGISTER)
2916 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2917 : NO_REGS);
2918 pref_or_nothing[i]
2919 = (code == REG
2920 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2921 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2924 /* If this is simply a copy from operand 1 to operand 0, merge the
2925 preferred classes for the operands. */
2926 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2927 && recog_data.operand[1] == SET_SRC (set))
2929 preferred_class[0] = preferred_class[1]
2930 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2931 pref_or_nothing[0] |= pref_or_nothing[1];
2932 pref_or_nothing[1] |= pref_or_nothing[0];
2935 /* Now see what we need for pseudo-regs that didn't get hard regs
2936 or got the wrong kind of hard reg. For this, we must consider
2937 all the operands together against the register constraints. */
2939 best = MAX_RECOG_OPERANDS * 2 + 600;
2941 swapped = 0;
2942 goal_alternative_swapped = 0;
2943 try_swapped:
2945 /* The constraints are made of several alternatives.
2946 Each operand's constraint looks like foo,bar,... with commas
2947 separating the alternatives. The first alternatives for all
2948 operands go together, the second alternatives go together, etc.
2950 First loop over alternatives. */
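/* For instance, with constraints "=r,m" for operand 0 and "rI,r" for
operand 1, alternative 0 pairs "=r" with "rI" and alternative 1 pairs
"m" with "r"; each alternative is scored as a whole below.  */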
2952 for (this_alternative_number = 0;
2953 this_alternative_number < n_alternatives;
2954 this_alternative_number++)
2956 /* Loop over operands for one constraint alternative. */
2957 /* LOSERS counts those that don't fit this alternative
2958 and would require loading. */
2959 int losers = 0;
2960 /* BAD is set to 1 if some operand can't fit this alternative
2961 even after reloading. */
2962 int bad = 0;
2963 /* REJECT is a count of how undesirable this alternative says it is
2964 if any reloading is required. If the alternative matches exactly
2965 then REJECT is ignored, but otherwise it gets this much
2966 counted against it in addition to the reloading needed. Each
2967 ? counts three times here since we want the disparaging caused by
2968 a bad register class to only count 1/3 as much. */
2969 int reject = 0;
2971 if (!recog_data.alternative_enabled_p[this_alternative_number])
2973 int i;
2975 for (i = 0; i < recog_data.n_operands; i++)
2976 constraints[i] = skip_alternative (constraints[i]);
2978 continue;
2981 this_earlyclobber = 0;
2983 for (i = 0; i < noperands; i++)
2985 const char *p = constraints[i];
2986 char *end;
2987 int len;
2988 int win = 0;
2989 int did_match = 0;
2990 /* 0 => this operand can be reloaded somehow for this alternative. */
2991 int badop = 1;
2992 /* 0 => this operand can be reloaded if the alternative allows regs. */
2993 int winreg = 0;
2994 int c;
2995 int m;
2996 rtx operand = recog_data.operand[i];
2997 int offset = 0;
2998 /* Nonzero means this is a MEM that must be reloaded into a reg
2999 regardless of what the constraint says. */
3000 int force_reload = 0;
3001 int offmemok = 0;
3002 /* Nonzero if a constant forced into memory would be OK for this
3003 operand. */
3004 int constmemok = 0;
3005 int earlyclobber = 0;
3007 /* If the predicate accepts a unary operator, it means that
3008 we need to reload the operand, but do not do this for
3009 match_operator and friends. */
3010 if (UNARY_P (operand) && *p != 0)
3011 operand = XEXP (operand, 0);
3013 /* If the operand is a SUBREG, extract
3014 the REG or MEM (or maybe even a constant) within.
3015 (Constants can occur as a result of reg_equiv_constant.) */
3017 while (GET_CODE (operand) == SUBREG)
3019 /* Offset only matters when operand is a REG and
3020 it is a hard reg. This is because it is passed
3021 to reg_fits_class_p if it is a REG and all pseudos
3022 return 0 from that function. */
3023 if (REG_P (SUBREG_REG (operand))
3024 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3026 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3027 GET_MODE (SUBREG_REG (operand)),
3028 SUBREG_BYTE (operand),
3029 GET_MODE (operand)) < 0)
3030 force_reload = 1;
3031 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3032 GET_MODE (SUBREG_REG (operand)),
3033 SUBREG_BYTE (operand),
3034 GET_MODE (operand));
3036 operand = SUBREG_REG (operand);
3037 /* Force reload if this is a constant or PLUS or if there may
3038 be a problem accessing OPERAND in the outer mode. */
3039 if (CONSTANT_P (operand)
3040 || GET_CODE (operand) == PLUS
3041 /* We must force a reload of paradoxical SUBREGs
3042 of a MEM because the alignment of the inner value
3043 may not be enough to do the outer reference. On
3044 big-endian machines, it may also reference outside
3045 the object.
3047 On machines that extend byte operations and we have a
3048 SUBREG where both the inner and outer modes are no wider
3049 than a word and the inner mode is narrower, is integral,
3050 and gets extended when loaded from memory, combine.c has
3051 made assumptions about the behavior of the machine in such
3052 register access. If the data is, in fact, in memory we
3053 must always load using the size assumed to be in the
3054 register and let the insn do the different-sized
3055 accesses.
3057 This is doubly true if WORD_REGISTER_OPERATIONS. In
3058 this case eliminate_regs has left non-paradoxical
3059 subregs for push_reload to see. Make sure it does
3060 by forcing the reload.
3062 ??? When is it right at this stage to have a subreg
3063 of a mem that is _not_ to be handled specially? IMO
3064 those should have been reduced to just a mem. */
3065 || ((MEM_P (operand)
3066 || (REG_P (operand)
3067 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3068 #ifndef WORD_REGISTER_OPERATIONS
3069 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3070 < BIGGEST_ALIGNMENT)
3071 && (GET_MODE_SIZE (operand_mode[i])
3072 > GET_MODE_SIZE (GET_MODE (operand))))
3073 || BYTES_BIG_ENDIAN
3074 #ifdef LOAD_EXTEND_OP
3075 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3076 && (GET_MODE_SIZE (GET_MODE (operand))
3077 <= UNITS_PER_WORD)
3078 && (GET_MODE_SIZE (operand_mode[i])
3079 > GET_MODE_SIZE (GET_MODE (operand)))
3080 && INTEGRAL_MODE_P (GET_MODE (operand))
3081 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3082 #endif
3084 #endif
3087 force_reload = 1;
3090 this_alternative[i] = NO_REGS;
3091 this_alternative_win[i] = 0;
3092 this_alternative_match_win[i] = 0;
3093 this_alternative_offmemok[i] = 0;
3094 this_alternative_earlyclobber[i] = 0;
3095 this_alternative_matches[i] = -1;
3097 /* An empty constraint or empty alternative
3098 allows anything which matched the pattern. */
3099 if (*p == 0 || *p == ',')
3100 win = 1, badop = 0;
3102 /* Scan this alternative's specs for this operand;
3103 set WIN if the operand fits any letter in this alternative.
3104 Otherwise, clear BADOP if this operand could
3105 fit some letter after reloads,
3106 or set WINREG if this operand could fit after reloads
3107 provided the constraint allows some registers. */
3110 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3112 case '\0':
3113 len = 0;
3114 break;
3115 case ',':
3116 c = '\0';
3117 break;
3119 case '=': case '+': case '*':
3120 break;
3122 case '%':
3123 /* We only support one commutative marker, the first
3124 one. We already set commutative above. */
3125 break;
3127 case '?':
3128 reject += 6;
3129 break;
3131 case '!':
3132 reject = 600;
3133 break;
3135 case '#':
3136 /* Ignore rest of this alternative as far as
3137 reloading is concerned. */
3138 do
3139 p++;
3140 while (*p && *p != ',');
3141 len = 0;
3142 break;
3144 case '0': case '1': case '2': case '3': case '4':
3145 case '5': case '6': case '7': case '8': case '9':
3146 m = strtoul (p, &end, 10);
3147 p = end;
3148 len = 0;
3150 this_alternative_matches[i] = m;
3151 /* We are supposed to match a previous operand.
3152 If we do, we win if that one did.
3153 If we do not, count both of the operands as losers.
3154 (This is too conservative, since most of the time
3155 only a single reload insn will be needed to make
3156 the two operands win. As a result, this alternative
3157 may be rejected when it is actually desirable.) */
3158 if ((swapped && (m != commutative || i != commutative + 1))
3159 /* If we are matching as if two operands were swapped,
3160 also pretend that operands_match had been computed
3161 with swapped.
3162 But if I is the second of those and C is the first,
3163 don't exchange them, because operands_match is valid
3164 only on one side of its diagonal. */
3165 ? (operands_match
3166 [(m == commutative || m == commutative + 1)
3167 ? 2 * commutative + 1 - m : m]
3168 [(i == commutative || i == commutative + 1)
3169 ? 2 * commutative + 1 - i : i])
3170 : operands_match[m][i])
3172 /* If we are matching a non-offsettable address where an
3173 offsettable address was expected, then we must reject
3174 this combination, because we can't reload it. */
3175 if (this_alternative_offmemok[m]
3176 && MEM_P (recog_data.operand[m])
3177 && this_alternative[m] == NO_REGS
3178 && ! this_alternative_win[m])
3179 bad = 1;
3181 did_match = this_alternative_win[m];
3183 else
3185 /* Operands don't match. */
3186 rtx value;
3187 int loc1, loc2;
3188 /* Retroactively mark the operand we had to match
3189 as a loser, if it wasn't already. */
3190 if (this_alternative_win[m])
3191 losers++;
3192 this_alternative_win[m] = 0;
3193 if (this_alternative[m] == NO_REGS)
3194 bad = 1;
3195 /* But count the pair only once in the total badness of
3196 this alternative, if the pair can be a dummy reload.
3197 The pointers in operand_loc are not swapped; swap
3198 them by hand if necessary. */
3199 if (swapped && i == commutative)
3200 loc1 = commutative + 1;
3201 else if (swapped && i == commutative + 1)
3202 loc1 = commutative;
3203 else
3204 loc1 = i;
3205 if (swapped && m == commutative)
3206 loc2 = commutative + 1;
3207 else if (swapped && m == commutative + 1)
3208 loc2 = commutative;
3209 else
3210 loc2 = m;
3211 value
3212 = find_dummy_reload (recog_data.operand[i],
3213 recog_data.operand[m],
3214 recog_data.operand_loc[loc1],
3215 recog_data.operand_loc[loc2],
3216 operand_mode[i], operand_mode[m],
3217 this_alternative[m], -1,
3218 this_alternative_earlyclobber[m]);
3220 if (value != 0)
3221 losers--;
3223 /* This can be fixed with reloads if the operand
3224 we are supposed to match can be fixed with reloads. */
3225 badop = 0;
3226 this_alternative[i] = this_alternative[m];
3228 /* If we have to reload this operand and some previous
3229 operand also had to match the same thing as this
3230 operand, we don't know how to do that. So reject this
3231 alternative. */
3232 if (! did_match || force_reload)
3233 for (j = 0; j < i; j++)
3234 if (this_alternative_matches[j]
3235 == this_alternative_matches[i])
3236 badop = 1;
3237 break;
3239 case 'p':
3240 /* All necessary reloads for an address_operand
3241 were handled in find_reloads_address. */
3242 this_alternative[i]
3243 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3244 ADDRESS, SCRATCH);
3245 win = 1;
3246 badop = 0;
3247 break;
3249 case TARGET_MEM_CONSTRAINT:
3250 if (force_reload)
3251 break;
3252 if (MEM_P (operand)
3253 || (REG_P (operand)
3254 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3255 && reg_renumber[REGNO (operand)] < 0))
3256 win = 1;
3257 if (CONST_POOL_OK_P (operand_mode[i], operand))
3258 badop = 0;
3259 constmemok = 1;
3260 break;
3262 case '<':
3263 if (MEM_P (operand)
3264 && ! address_reloaded[i]
3265 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3266 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3267 win = 1;
3268 break;
3270 case '>':
3271 if (MEM_P (operand)
3272 && ! address_reloaded[i]
3273 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3274 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3275 win = 1;
3276 break;
3278 /* Memory operand whose address is not offsettable. */
3279 case 'V':
3280 if (force_reload)
3281 break;
3282 if (MEM_P (operand)
3283 && ! (ind_levels ? offsettable_memref_p (operand)
3284 : offsettable_nonstrict_memref_p (operand))
3285 /* Certain mem addresses will become offsettable
3286 after they themselves are reloaded. This is important;
3287 we don't want our own handling of unoffsettables
3288 to override the handling of reg_equiv_address. */
3289 && !(REG_P (XEXP (operand, 0))
3290 && (ind_levels == 0
3291 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3292 win = 1;
3293 break;
3295 /* Memory operand whose address is offsettable. */
3296 case 'o':
3297 if (force_reload)
3298 break;
3299 if ((MEM_P (operand)
3300 /* If IND_LEVELS, find_reloads_address won't reload a
3301 pseudo that didn't get a hard reg, so we have to
3302 reject that case. */
3303 && ((ind_levels ? offsettable_memref_p (operand)
3304 : offsettable_nonstrict_memref_p (operand))
3305 /* A reloaded address is offsettable because it is now
3306 just a simple register indirect. */
3307 || address_reloaded[i] == 1))
3308 || (REG_P (operand)
3309 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3310 && reg_renumber[REGNO (operand)] < 0
3311 /* If reg_equiv_address is nonzero, we will be
3312 loading it into a register; hence it will be
3313 offsettable, but we cannot say that reg_equiv_mem
3314 is offsettable without checking. */
3315 && ((reg_equiv_mem (REGNO (operand)) != 0
3316 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3317 || (reg_equiv_address (REGNO (operand)) != 0))))
3318 win = 1;
3319 if (CONST_POOL_OK_P (operand_mode[i], operand)
3320 || MEM_P (operand))
3321 badop = 0;
3322 constmemok = 1;
3323 offmemok = 1;
3324 break;
3326 case '&':
3327 /* Output operand that is stored before the need for the
3328 input operands (and their index registers) is over. */
3329 earlyclobber = 1, this_earlyclobber = 1;
3330 break;
3332 case 'E':
3333 case 'F':
3334 if (GET_CODE (operand) == CONST_DOUBLE
3335 || (GET_CODE (operand) == CONST_VECTOR
3336 && (GET_MODE_CLASS (GET_MODE (operand))
3337 == MODE_VECTOR_FLOAT)))
3338 win = 1;
3339 break;
3341 case 'G':
3342 case 'H':
3343 if (GET_CODE (operand) == CONST_DOUBLE
3344 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3345 win = 1;
3346 break;
3348 case 's':
3349 if (CONST_INT_P (operand)
3350 || (GET_CODE (operand) == CONST_DOUBLE
3351 && GET_MODE (operand) == VOIDmode))
3352 break;
3353 case 'i':
3354 if (CONSTANT_P (operand)
3355 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3356 win = 1;
3357 break;
3359 case 'n':
3360 if (CONST_INT_P (operand)
3361 || (GET_CODE (operand) == CONST_DOUBLE
3362 && GET_MODE (operand) == VOIDmode))
3363 win = 1;
3364 break;
3366 case 'I':
3367 case 'J':
3368 case 'K':
3369 case 'L':
3370 case 'M':
3371 case 'N':
3372 case 'O':
3373 case 'P':
3374 if (CONST_INT_P (operand)
3375 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3376 win = 1;
3377 break;
3379 case 'X':
3380 force_reload = 0;
3381 win = 1;
3382 break;
3384 case 'g':
3385 if (! force_reload
3386 /* A PLUS is never a valid operand, but reload can make
3387 it from a register when eliminating registers. */
3388 && GET_CODE (operand) != PLUS
3389 /* A SCRATCH is not a valid operand. */
3390 && GET_CODE (operand) != SCRATCH
3391 && (! CONSTANT_P (operand)
3392 || ! flag_pic
3393 || LEGITIMATE_PIC_OPERAND_P (operand))
3394 && (GENERAL_REGS == ALL_REGS
3395 || !REG_P (operand)
3396 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3397 && reg_renumber[REGNO (operand)] < 0)))
3398 win = 1;
3399 /* Drop through into 'r' case. */
3401 case 'r':
3402 this_alternative[i]
3403 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3404 goto reg;
3406 default:
3407 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3409 #ifdef EXTRA_CONSTRAINT_STR
3410 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3412 if (force_reload)
3413 break;
3414 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3415 win = 1;
3416 /* If the address was already reloaded,
3417 we win as well. */
3418 else if (MEM_P (operand)
3419 && address_reloaded[i] == 1)
3420 win = 1;
3421 /* Likewise if the address will be reloaded because
3422 reg_equiv_address is nonzero. For reg_equiv_mem
3423 we have to check. */
3424 else if (REG_P (operand)
3425 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3426 && reg_renumber[REGNO (operand)] < 0
3427 && ((reg_equiv_mem (REGNO (operand)) != 0
3428 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3429 || (reg_equiv_address (REGNO (operand)) != 0)))
3430 win = 1;
3432 /* If we didn't already win, we can reload
3433 constants via force_const_mem, and other
3434 MEMs by reloading the address like for 'o'. */
3435 if (CONST_POOL_OK_P (operand_mode[i], operand)
3436 || MEM_P (operand))
3437 badop = 0;
3438 constmemok = 1;
3439 offmemok = 1;
3440 break;
3442 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3444 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3445 win = 1;
3447 /* If we didn't already win, we can reload
3448 the address into a base register. */
3449 this_alternative[i]
3450 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3451 ADDRESS, SCRATCH);
3452 badop = 0;
3453 break;
3456 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3457 win = 1;
3458 #endif
3459 break;
3462 this_alternative[i]
3463 = (reg_class_subunion
3464 [this_alternative[i]]
3465 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3466 reg:
3467 if (GET_MODE (operand) == BLKmode)
3468 break;
3469 winreg = 1;
3470 if (REG_P (operand)
3471 && reg_fits_class_p (operand, this_alternative[i],
3472 offset, GET_MODE (recog_data.operand[i])))
3473 win = 1;
3474 break;
3476 while ((p += len), c);
3478 constraints[i] = p;
3480 /* If this operand could be handled with a reg,
3481 and some reg is allowed, then this operand can be handled. */
3482 if (winreg && this_alternative[i] != NO_REGS
3483 && (win || !class_only_fixed_regs[this_alternative[i]]))
3484 badop = 0;
3486 /* Record which operands fit this alternative. */
3487 this_alternative_earlyclobber[i] = earlyclobber;
3488 if (win && ! force_reload)
3489 this_alternative_win[i] = 1;
3490 else if (did_match && ! force_reload)
3491 this_alternative_match_win[i] = 1;
3492 else
3494 int const_to_mem = 0;
3496 this_alternative_offmemok[i] = offmemok;
3497 losers++;
3498 if (badop)
3499 bad = 1;
3500 /* Alternative loses if it has no regs for a reg operand. */
3501 if (REG_P (operand)
3502 && this_alternative[i] == NO_REGS
3503 && this_alternative_matches[i] < 0)
3504 bad = 1;
3506 /* If this is a constant that is reloaded into the desired
3507 class by copying it to memory first, count that as another
3508 reload. This is consistent with other code and is
3509 required to avoid choosing another alternative when
3510 the constant is moved into memory by this function on
3511 an early reload pass. Note that the test here is
3512 precisely the same as in the code below that calls
3513 force_const_mem. */
3514 if (CONST_POOL_OK_P (operand_mode[i], operand)
3515 && ((targetm.preferred_reload_class (operand,
3516 this_alternative[i])
3517 == NO_REGS)
3518 || no_input_reloads))
3520 const_to_mem = 1;
3521 if (this_alternative[i] != NO_REGS)
3522 losers++;
3525 /* Alternative loses if it requires a type of reload not
3526 permitted for this insn. We can always reload SCRATCH
3527 and objects with a REG_UNUSED note. */
3528 if (GET_CODE (operand) != SCRATCH
3529 && modified[i] != RELOAD_READ && no_output_reloads
3530 && ! find_reg_note (insn, REG_UNUSED, operand))
3531 bad = 1;
3532 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3533 && ! const_to_mem)
3534 bad = 1;
3536 /* If we can't reload this value at all, reject this
3537 alternative. Note that we could also lose due to
3538 LIMIT_RELOAD_CLASS, but we don't check that
3539 here. */
3541 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3543 if (targetm.preferred_reload_class (operand, this_alternative[i])
3544 == NO_REGS)
3545 reject = 600;
3547 if (operand_type[i] == RELOAD_FOR_OUTPUT
3548 && (targetm.preferred_output_reload_class (operand,
3549 this_alternative[i])
3550 == NO_REGS))
3551 reject = 600;
3554 /* We prefer to reload pseudos over reloading other things,
3555 since such reloads may be able to be eliminated later.
3556 If we are reloading a SCRATCH, we won't be generating any
3557 insns, just using a register, so it is also preferred.
3558 So bump REJECT in other cases. Don't do this in the
3559 case where we are forcing a constant into memory and
3560 it will then win, since we don't want to have a different
3561 alternative match in that case. */
3562 if (! (REG_P (operand)
3563 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3564 && GET_CODE (operand) != SCRATCH
3565 && ! (const_to_mem && constmemok))
3566 reject += 2;
3568 /* Input reloads can be inherited more often than output
3569 reloads can be removed, so penalize output reloads. */
3570 if (operand_type[i] != RELOAD_FOR_INPUT
3571 && GET_CODE (operand) != SCRATCH)
3572 reject++;
3575 /* If this operand is a pseudo register that didn't get a hard
3576 reg and this alternative accepts some register, see if the
3577 class that we want is a subset of the preferred class for this
3578 register. If not, but it intersects that class, use the
3579 preferred class instead. If it does not intersect the preferred
3580 class, show that usage of this alternative should be discouraged;
3581 it will be discouraged more still if the register is `preferred
3582 or nothing'. We do this because it increases the chance of
3583 reusing our spill register in a later insn and avoiding a pair
3584 of memory stores and loads.
3586 Don't bother with this if this alternative will accept this
3587 operand.
3589 Don't do this for a multiword operand, since it is only a
3590 small win and has the risk of requiring more spill registers,
3591 which could cause a large loss.
3593 Don't do this if the preferred class has only one register
3594 because we might otherwise exhaust the class. */
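/* For example, if this alternative allows all of GENERAL_REGS but the
pseudo's preferred class is a smaller subclass of it, narrowing
this_alternative[i] to that subclass makes it more likely that the
spill register chosen here can be reused by later references to the
same pseudo. */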
3596 if (! win && ! did_match
3597 && this_alternative[i] != NO_REGS
3598 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3599 && reg_class_size [(int) preferred_class[i]] > 0
3600 && ! small_register_class_p (preferred_class[i]))
3602 if (! reg_class_subset_p (this_alternative[i],
3603 preferred_class[i]))
3605 /* Since we don't have a way of forming the intersection,
3606 we just do something special if the preferred class
3607 is a subset of the class we have; that's the most
3608 common case anyway. */
3609 if (reg_class_subset_p (preferred_class[i],
3610 this_alternative[i]))
3611 this_alternative[i] = preferred_class[i];
3612 else
3613 reject += (2 + 2 * pref_or_nothing[i]);
3618 /* Now see if any output operands that are marked "earlyclobber"
3619 in this alternative conflict with any input operands
3620 or any memory addresses. */
3622 for (i = 0; i < noperands; i++)
3623 if (this_alternative_earlyclobber[i]
3624 && (this_alternative_win[i] || this_alternative_match_win[i]))
3626 struct decomposition early_data;
3628 early_data = decompose (recog_data.operand[i]);
3630 gcc_assert (modified[i] != RELOAD_READ);
3632 if (this_alternative[i] == NO_REGS)
3634 this_alternative_earlyclobber[i] = 0;
3635 gcc_assert (this_insn_is_asm);
3636 error_for_asm (this_insn,
3637 "%<&%> constraint used with no register class");
3640 for (j = 0; j < noperands; j++)
3641 /* Is this an input operand or a memory ref? */
3642 if ((MEM_P (recog_data.operand[j])
3643 || modified[j] != RELOAD_WRITE)
3644 && j != i
3645 /* Ignore things like match_operator operands. */
3646 && !recog_data.is_operator[j]
3647 /* Don't count an input operand that is constrained to match
3648 the early clobber operand. */
3649 && ! (this_alternative_matches[j] == i
3650 && rtx_equal_p (recog_data.operand[i],
3651 recog_data.operand[j]))
3652 /* Is it altered by storing the earlyclobber operand? */
3653 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3654 early_data))
3656 /* If the output is in a non-empty few-regs class,
3657 it's costly to reload it, so reload the input instead. */
3658 if (small_register_class_p (this_alternative[i])
3659 && (REG_P (recog_data.operand[j])
3660 || GET_CODE (recog_data.operand[j]) == SUBREG))
3662 losers++;
3663 this_alternative_win[j] = 0;
3664 this_alternative_match_win[j] = 0;
3666 else
3667 break;
3669 /* If an earlyclobber operand conflicts with something,
3670 it must be reloaded, so request this and count the cost. */
3671 if (j != noperands)
3673 losers++;
3674 this_alternative_win[i] = 0;
3675 this_alternative_match_win[j] = 0;
3676 for (j = 0; j < noperands; j++)
3677 if (this_alternative_matches[j] == i
3678 && this_alternative_match_win[j])
3680 this_alternative_win[j] = 0;
3681 this_alternative_match_win[j] = 0;
3682 losers++;
3687 /* If one alternative accepts all the operands, no reload required,
3688 choose that alternative; don't consider the remaining ones. */
3689 if (losers == 0)
3691 /* Unswap these so that they are never swapped at `finish'. */
3692 if (commutative >= 0)
3694 recog_data.operand[commutative] = substed_operand[commutative];
3695 recog_data.operand[commutative + 1]
3696 = substed_operand[commutative + 1];
3698 for (i = 0; i < noperands; i++)
3700 goal_alternative_win[i] = this_alternative_win[i];
3701 goal_alternative_match_win[i] = this_alternative_match_win[i];
3702 goal_alternative[i] = this_alternative[i];
3703 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3704 goal_alternative_matches[i] = this_alternative_matches[i];
3705 goal_alternative_earlyclobber[i]
3706 = this_alternative_earlyclobber[i];
3708 goal_alternative_number = this_alternative_number;
3709 goal_alternative_swapped = swapped;
3710 goal_earlyclobber = this_earlyclobber;
3711 goto finish;
3714 /* REJECT, set by the ! and ? constraint characters and when a register
3715 would be reloaded into a non-preferred class, discourages the use of
3716 this alternative for a reload goal. REJECT is incremented by six
3717 for each ? and two for each non-preferred class. */
3718 losers = losers * 6 + reject;
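/* So, for example, an alternative needing two reloads with a single '?'
in its constraints weighs 2 * 6 + 6 = 18, the same as one needing
three reloads with no '?'. */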
3720 /* If this alternative can be made to work by reloading,
3721 and it needs less reloading than the others checked so far,
3722 record it as the chosen goal for reloading. */
3723 if (! bad)
3725 if (best > losers)
3727 for (i = 0; i < noperands; i++)
3729 goal_alternative[i] = this_alternative[i];
3730 goal_alternative_win[i] = this_alternative_win[i];
3731 goal_alternative_match_win[i]
3732 = this_alternative_match_win[i];
3733 goal_alternative_offmemok[i]
3734 = this_alternative_offmemok[i];
3735 goal_alternative_matches[i] = this_alternative_matches[i];
3736 goal_alternative_earlyclobber[i]
3737 = this_alternative_earlyclobber[i];
3739 goal_alternative_swapped = swapped;
3740 best = losers;
3741 goal_alternative_number = this_alternative_number;
3742 goal_earlyclobber = this_earlyclobber;
3747 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3748 then we need to try each alternative twice,
3749 the second time matching those two operands
3750 as if we had exchanged them.
3751 To do this, really exchange them in operands.
3753 If we have just tried the alternatives the second time,
3754 return operands to normal and drop through. */
3756 if (commutative >= 0)
3758 swapped = !swapped;
3759 if (swapped)
3761 enum reg_class tclass;
3762 int t;
3764 recog_data.operand[commutative] = substed_operand[commutative + 1];
3765 recog_data.operand[commutative + 1] = substed_operand[commutative];
3766 /* Swap the duplicates too. */
3767 for (i = 0; i < recog_data.n_dups; i++)
3768 if (recog_data.dup_num[i] == commutative
3769 || recog_data.dup_num[i] == commutative + 1)
3770 *recog_data.dup_loc[i]
3771 = recog_data.operand[(int) recog_data.dup_num[i]];
3773 tclass = preferred_class[commutative];
3774 preferred_class[commutative] = preferred_class[commutative + 1];
3775 preferred_class[commutative + 1] = tclass;
3777 t = pref_or_nothing[commutative];
3778 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3779 pref_or_nothing[commutative + 1] = t;
3781 t = address_reloaded[commutative];
3782 address_reloaded[commutative] = address_reloaded[commutative + 1];
3783 address_reloaded[commutative + 1] = t;
3785 memcpy (constraints, recog_data.constraints,
3786 noperands * sizeof (const char *));
3787 goto try_swapped;
3789 else
3791 recog_data.operand[commutative] = substed_operand[commutative];
3792 recog_data.operand[commutative + 1]
3793 = substed_operand[commutative + 1];
3794 /* Unswap the duplicates too. */
3795 for (i = 0; i < recog_data.n_dups; i++)
3796 if (recog_data.dup_num[i] == commutative
3797 || recog_data.dup_num[i] == commutative + 1)
3798 *recog_data.dup_loc[i]
3799 = recog_data.operand[(int) recog_data.dup_num[i]];
3803 /* The operands don't meet the constraints.
3804 goal_alternative describes the alternative
3805 that we could reach by reloading the fewest operands.
3806 Reload so as to fit it. */
3808 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3810 /* No alternative works with reloads?? */
3811 if (insn_code_number >= 0)
3812 fatal_insn ("unable to generate reloads for:", insn);
3813 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3814 /* Avoid further trouble with this insn. */
3815 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3816 n_reloads = 0;
3817 return 0;
3820 /* Jump to `finish' from above if all operands are valid already.
3821 In that case, goal_alternative_win is all 1. */
3822 finish:
3824 /* Right now, for any pair of operands I and J that are required to match,
3825 with I < J,
3826 goal_alternative_matches[J] is I.
3827 Set up goal_alternative_matched as the inverse function:
3828 goal_alternative_matched[I] = J. */
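/* For example, if a "0" constraint forces operand 2 to match operand 0,
goal_alternative_matches[2] is 0, and the loop below records
goal_alternative_matched[0] = 2 when operand 2 still needs a reload. */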
3830 for (i = 0; i < noperands; i++)
3831 goal_alternative_matched[i] = -1;
3833 for (i = 0; i < noperands; i++)
3834 if (! goal_alternative_win[i]
3835 && goal_alternative_matches[i] >= 0)
3836 goal_alternative_matched[goal_alternative_matches[i]] = i;
3838 for (i = 0; i < noperands; i++)
3839 goal_alternative_win[i] |= goal_alternative_match_win[i];
3841 /* If the best alternative is with operands 1 and 2 swapped,
3842 consider them swapped before reporting the reloads. Update the
3843 operand numbers of any reloads already pushed. */
3845 if (goal_alternative_swapped)
3847 rtx tem;
3849 tem = substed_operand[commutative];
3850 substed_operand[commutative] = substed_operand[commutative + 1];
3851 substed_operand[commutative + 1] = tem;
3852 tem = recog_data.operand[commutative];
3853 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3854 recog_data.operand[commutative + 1] = tem;
3855 tem = *recog_data.operand_loc[commutative];
3856 *recog_data.operand_loc[commutative]
3857 = *recog_data.operand_loc[commutative + 1];
3858 *recog_data.operand_loc[commutative + 1] = tem;
3860 for (i = 0; i < n_reloads; i++)
3862 if (rld[i].opnum == commutative)
3863 rld[i].opnum = commutative + 1;
3864 else if (rld[i].opnum == commutative + 1)
3865 rld[i].opnum = commutative;
3869 for (i = 0; i < noperands; i++)
3871 operand_reloadnum[i] = -1;
3873 /* If this is an earlyclobber operand, we need to widen the scope.
3874 The reload must remain valid from the start of the insn being
3875 reloaded until after the operand is stored into its destination.
3876 We approximate this with RELOAD_OTHER even though we know that we
3877 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3879 One special case that is worth checking is when we have an
3880 output that is earlyclobber but isn't used past the insn (typically
3881 a SCRATCH). In this case, we only need to have the reload live
3882 through the insn itself, but not for any of our input or output
3883 reloads.
3884 But we must not accidentally narrow the scope of an existing
3885 RELOAD_OTHER reload - leave these alone.
3887 In any case, anything needed to address this operand can remain
3888 however it was previously categorized. */
3890 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3891 operand_type[i]
3892 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3893 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3896 /* Any constants that aren't allowed and can't be reloaded
3897 into registers are here changed into memory references. */
3898 for (i = 0; i < noperands; i++)
3899 if (! goal_alternative_win[i])
3901 rtx op = recog_data.operand[i];
3902 rtx subreg = NULL_RTX;
3903 rtx plus = NULL_RTX;
3904 enum machine_mode mode = operand_mode[i];
3906 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3907 push_reload so we have to let them pass here. */
3908 if (GET_CODE (op) == SUBREG)
3910 subreg = op;
3911 op = SUBREG_REG (op);
3912 mode = GET_MODE (op);
3915 if (GET_CODE (op) == PLUS)
3917 plus = op;
3918 op = XEXP (op, 1);
3921 if (CONST_POOL_OK_P (mode, op)
3922 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3923 == NO_REGS)
3924 || no_input_reloads))
3926 int this_address_reloaded;
3927 rtx tem = force_const_mem (mode, op);
3929 /* If we stripped a SUBREG or a PLUS above add it back. */
3930 if (plus != NULL_RTX)
3931 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3933 if (subreg != NULL_RTX)
3934 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3936 this_address_reloaded = 0;
3937 substed_operand[i] = recog_data.operand[i]
3938 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3939 0, insn, &this_address_reloaded);
3941 /* If the alternative accepts constant pool refs directly
3942 there will be no reload needed at all. */
3943 if (plus == NULL_RTX
3944 && subreg == NULL_RTX
3945 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3946 ? substed_operand[i]
3947 : NULL,
3948 recog_data.constraints[i],
3949 goal_alternative_number))
3950 goal_alternative_win[i] = 1;
3954 /* Record the values of the earlyclobber operands for the caller. */
3955 if (goal_earlyclobber)
3956 for (i = 0; i < noperands; i++)
3957 if (goal_alternative_earlyclobber[i])
3958 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3960 /* Now record reloads for all the operands that need them. */
3961 for (i = 0; i < noperands; i++)
3962 if (! goal_alternative_win[i])
3964 /* Operands that match previous ones have already been handled. */
3965 if (goal_alternative_matches[i] >= 0)
3967 /* Handle an operand with a nonoffsettable address
3968 appearing where an offsettable address will do
3969 by reloading the address into a base register.
3971 ??? We can also do this when the operand is a register and
3972 reg_equiv_mem is not offsettable, but this is a bit tricky,
3973 so we don't bother with it. It may not be worth doing. */
3974 else if (goal_alternative_matched[i] == -1
3975 && goal_alternative_offmemok[i]
3976 && MEM_P (recog_data.operand[i]))
3978 /* If the address to be reloaded is a VOIDmode constant,
3979 use the default address mode as mode of the reload register,
3980 as would have been done by find_reloads_address. */
3981 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3982 enum machine_mode address_mode;
3983 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3984 if (address_mode == VOIDmode)
3985 address_mode = targetm.addr_space.address_mode (as);
3987 operand_reloadnum[i]
3988 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3989 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3990 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3991 address_mode,
3992 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3993 rld[operand_reloadnum[i]].inc
3994 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3996 /* If this operand is an output, we will have made any
3997 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3998 now we are treating part of the operand as an input, so
3999 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4001 if (modified[i] == RELOAD_WRITE)
4003 for (j = 0; j < n_reloads; j++)
4005 if (rld[j].opnum == i)
4007 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4008 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4009 else if (rld[j].when_needed
4010 == RELOAD_FOR_OUTADDR_ADDRESS)
4011 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4016 else if (goal_alternative_matched[i] == -1)
4018 operand_reloadnum[i]
4019 = push_reload ((modified[i] != RELOAD_WRITE
4020 ? recog_data.operand[i] : 0),
4021 (modified[i] != RELOAD_READ
4022 ? recog_data.operand[i] : 0),
4023 (modified[i] != RELOAD_WRITE
4024 ? recog_data.operand_loc[i] : 0),
4025 (modified[i] != RELOAD_READ
4026 ? recog_data.operand_loc[i] : 0),
4027 (enum reg_class) goal_alternative[i],
4028 (modified[i] == RELOAD_WRITE
4029 ? VOIDmode : operand_mode[i]),
4030 (modified[i] == RELOAD_READ
4031 ? VOIDmode : operand_mode[i]),
4032 (insn_code_number < 0 ? 0
4033 : insn_data[insn_code_number].operand[i].strict_low),
4034 0, i, operand_type[i]);
4036 /* In a matching pair of operands, one must be input only
4037 and the other must be output only.
4038 Pass the input operand as IN and the other as OUT. */
4039 else if (modified[i] == RELOAD_READ
4040 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4042 operand_reloadnum[i]
4043 = push_reload (recog_data.operand[i],
4044 recog_data.operand[goal_alternative_matched[i]],
4045 recog_data.operand_loc[i],
4046 recog_data.operand_loc[goal_alternative_matched[i]],
4047 (enum reg_class) goal_alternative[i],
4048 operand_mode[i],
4049 operand_mode[goal_alternative_matched[i]],
4050 0, 0, i, RELOAD_OTHER);
4051 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4053 else if (modified[i] == RELOAD_WRITE
4054 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4056 operand_reloadnum[goal_alternative_matched[i]]
4057 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4058 recog_data.operand[i],
4059 recog_data.operand_loc[goal_alternative_matched[i]],
4060 recog_data.operand_loc[i],
4061 (enum reg_class) goal_alternative[i],
4062 operand_mode[goal_alternative_matched[i]],
4063 operand_mode[i],
4064 0, 0, i, RELOAD_OTHER);
4065 operand_reloadnum[i] = output_reloadnum;
4067 else
4069 gcc_assert (insn_code_number < 0);
4070 error_for_asm (insn, "inconsistent operand constraints "
4071 "in an %<asm%>");
4072 /* Avoid further trouble with this insn. */
4073 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4074 n_reloads = 0;
4075 return 0;
4078 else if (goal_alternative_matched[i] < 0
4079 && goal_alternative_matches[i] < 0
4080 && address_operand_reloaded[i] != 1
4081 && optimize)
4083 /* For each non-matching operand that's a MEM or a pseudo-register
4084 that didn't get a hard register, make an optional reload.
4085 This may get done even if the insn needs no reloads otherwise. */
4087 rtx operand = recog_data.operand[i];
4089 while (GET_CODE (operand) == SUBREG)
4090 operand = SUBREG_REG (operand);
4091 if ((MEM_P (operand)
4092 || (REG_P (operand)
4093 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4094 /* If this is only for an output, the optional reload would not
4095 actually cause us to use a register now, just note that
4096 something is stored here. */
4097 && (goal_alternative[i] != NO_REGS
4098 || modified[i] == RELOAD_WRITE)
4099 && ! no_input_reloads
4100 /* An optional output reload might allow us to delete INSN later.
4101 We mustn't make in-out reloads on insns that are not permitted
4102 output reloads.
4103 If this is an asm, we can't delete it; we must not even call
4104 push_reload for an optional output reload in this case,
4105 because we can't be sure that the constraint allows a register,
4106 and push_reload verifies the constraints for asms. */
4107 && (modified[i] == RELOAD_READ
4108 || (! no_output_reloads && ! this_insn_is_asm)))
4109 operand_reloadnum[i]
4110 = push_reload ((modified[i] != RELOAD_WRITE
4111 ? recog_data.operand[i] : 0),
4112 (modified[i] != RELOAD_READ
4113 ? recog_data.operand[i] : 0),
4114 (modified[i] != RELOAD_WRITE
4115 ? recog_data.operand_loc[i] : 0),
4116 (modified[i] != RELOAD_READ
4117 ? recog_data.operand_loc[i] : 0),
4118 (enum reg_class) goal_alternative[i],
4119 (modified[i] == RELOAD_WRITE
4120 ? VOIDmode : operand_mode[i]),
4121 (modified[i] == RELOAD_READ
4122 ? VOIDmode : operand_mode[i]),
4123 (insn_code_number < 0 ? 0
4124 : insn_data[insn_code_number].operand[i].strict_low),
4125 1, i, operand_type[i]);
4126 /* If a memory reference remains (either as a MEM or a pseudo that
4127 did not get a hard register), yet we can't make an optional
4128 reload, check if this is actually a pseudo register reference;
4129 we then need to emit a USE and/or a CLOBBER so that reload
4130 inheritance will do the right thing. */
4131 else if (replace
4132 && (MEM_P (operand)
4133 || (REG_P (operand)
4134 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4135 && reg_renumber [REGNO (operand)] < 0)))
4137 operand = *recog_data.operand_loc[i];
4139 while (GET_CODE (operand) == SUBREG)
4140 operand = SUBREG_REG (operand);
4141 if (REG_P (operand))
4143 if (modified[i] != RELOAD_WRITE)
4144 /* We mark the USE with QImode so that we recognize
4145 it as one that can be safely deleted at the end
4146 of reload. */
4147 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4148 insn), QImode);
4149 if (modified[i] != RELOAD_READ)
4150 emit_insn_after (gen_clobber (operand), insn);
4154 else if (goal_alternative_matches[i] >= 0
4155 && goal_alternative_win[goal_alternative_matches[i]]
4156 && modified[i] == RELOAD_READ
4157 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4158 && ! no_input_reloads && ! no_output_reloads
4159 && optimize)
4161 /* Similarly, make an optional reload for a pair of matching
4162 objects that are in MEM or a pseudo that didn't get a hard reg. */
4164 rtx operand = recog_data.operand[i];
4166 while (GET_CODE (operand) == SUBREG)
4167 operand = SUBREG_REG (operand);
4168 if ((MEM_P (operand)
4169 || (REG_P (operand)
4170 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4171 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4172 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4173 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4174 recog_data.operand[i],
4175 recog_data.operand_loc[goal_alternative_matches[i]],
4176 recog_data.operand_loc[i],
4177 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4178 operand_mode[goal_alternative_matches[i]],
4179 operand_mode[i],
4180 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4183 /* Perform whatever substitutions on the operands we are supposed
4184 to make due to commutativity or replacement of registers
4185 with equivalent constants or memory slots. */
4187 for (i = 0; i < noperands; i++)
4189 /* We only do this on the last pass through reload, because it is
4190 possible for some data (like reg_equiv_address) to be changed during
4191 later passes. Moreover, we lose the opportunity to get a useful
4192 reload_{in,out}_reg when we do these replacements. */
4194 if (replace)
4196 rtx substitution = substed_operand[i];
4198 *recog_data.operand_loc[i] = substitution;
4200 /* If we're replacing an operand with a LABEL_REF, we need to
4201 make sure that there's a REG_LABEL_OPERAND note attached to
4202 this instruction. */
4203 if (GET_CODE (substitution) == LABEL_REF
4204 && !find_reg_note (insn, REG_LABEL_OPERAND,
4205 XEXP (substitution, 0))
4206 /* For a JUMP_P, if it was a branch target it must have
4207 already been recorded as such. */
4208 && (!JUMP_P (insn)
4209 || !label_is_jump_target_p (XEXP (substitution, 0),
4210 insn)))
4212 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4213 if (LABEL_P (XEXP (substitution, 0)))
4214 ++LABEL_NUSES (XEXP (substitution, 0));
4218 else
4219 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4222 /* If this insn pattern contains any MATCH_DUP's, make sure that
4223 they will be substituted if the operands they match are substituted.
4224 Also do now any substitutions we already did on the operands.
4226 Don't do this if we aren't making replacements because we might be
4227 propagating things allocated by frame pointer elimination into places
4228 it doesn't expect. */
4230 if (insn_code_number >= 0 && replace)
4231 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4233 int opno = recog_data.dup_num[i];
4234 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4235 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4238 #if 0
4239 /* This loses because reloading of prior insns can invalidate the equivalence
4240 (or at least find_equiv_reg isn't smart enough to find it any more),
4241 causing this insn to need more reload regs than it needed before.
4242 It may be too late to make the reload regs available.
4243 Now this optimization is done safely in choose_reload_regs. */
4245 /* For each reload of a reg into some other class of reg,
4246 search for an existing equivalent reg (same value now) in the right class.
4247 We can use it as long as we don't need to change its contents. */
4248 for (i = 0; i < n_reloads; i++)
4249 if (rld[i].reg_rtx == 0
4250 && rld[i].in != 0
4251 && REG_P (rld[i].in)
4252 && rld[i].out == 0)
4254 rld[i].reg_rtx
4255 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4256 static_reload_reg_p, 0, rld[i].inmode);
4257 /* Prevent generation of insn to load the value
4258 because the one we found already has the value. */
4259 if (rld[i].reg_rtx)
4260 rld[i].in = rld[i].reg_rtx;
4262 #endif
4264 /* If we detected error and replaced asm instruction by USE, forget about the
4265 reloads. */
4266 if (GET_CODE (PATTERN (insn)) == USE
4267 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4268 n_reloads = 0;
4270 /* Perhaps an output reload can be combined with another
4271 to reduce needs by one. */
4272 if (!goal_earlyclobber)
4273 combine_reloads ();
4275 /* If we have a pair of reloads for parts of an address, they are reloading
4276 the same object, the operands themselves were not reloaded, and they
4277 are for two operands that are supposed to match, merge the reloads and
4278 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4280 for (i = 0; i < n_reloads; i++)
4282 int k;
4284 for (j = i + 1; j < n_reloads; j++)
4285 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4286 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4287 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4288 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4289 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4290 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4291 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4292 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4293 && rtx_equal_p (rld[i].in, rld[j].in)
4294 && (operand_reloadnum[rld[i].opnum] < 0
4295 || rld[operand_reloadnum[rld[i].opnum]].optional)
4296 && (operand_reloadnum[rld[j].opnum] < 0
4297 || rld[operand_reloadnum[rld[j].opnum]].optional)
4298 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4299 || (goal_alternative_matches[rld[j].opnum]
4300 == rld[i].opnum)))
4302 for (k = 0; k < n_replacements; k++)
4303 if (replacements[k].what == j)
4304 replacements[k].what = i;
4306 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4307 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4308 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4309 else
4310 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4311 rld[j].in = 0;
4315 /* Scan all the reloads and update their type.
4316 If a reload is for the address of an operand and we didn't reload
4317 that operand, change the type. Similarly, change the operand number
4318 of a reload when two operands match. If a reload is optional, treat it
4319 as though the operand isn't reloaded.
4321 ??? This latter case is somewhat odd because if we do the optional
4322 reload, it means the object is hanging around. Thus we need only
4323 do the address reload if the optional reload was NOT done.
4325 Change secondary reloads to be the address type of their operand, not
4326 the normal type.
4328 If an operand's reload is now RELOAD_OTHER, change any
4329 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4330 RELOAD_FOR_OTHER_ADDRESS. */
4332 for (i = 0; i < n_reloads; i++)
4334 if (rld[i].secondary_p
4335 && rld[i].when_needed == operand_type[rld[i].opnum])
4336 rld[i].when_needed = address_type[rld[i].opnum];
4338 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4339 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4340 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4341 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4342 && (operand_reloadnum[rld[i].opnum] < 0
4343 || rld[operand_reloadnum[rld[i].opnum]].optional))
4345 /* If we have a secondary reload to go along with this reload,
4346 change its type to RELOAD_FOR_OPADDR_ADDR. */
4348 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4349 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4350 && rld[i].secondary_in_reload != -1)
4352 int secondary_in_reload = rld[i].secondary_in_reload;
4354 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4356 /* If there's a tertiary reload we have to change it also. */
4357 if (secondary_in_reload > 0
4358 && rld[secondary_in_reload].secondary_in_reload != -1)
4359 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4360 = RELOAD_FOR_OPADDR_ADDR;
4363 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4364 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4365 && rld[i].secondary_out_reload != -1)
4367 int secondary_out_reload = rld[i].secondary_out_reload;
4369 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4371 /* If there's a tertiary reload we have to change it also. */
4372 if (secondary_out_reload
4373 && rld[secondary_out_reload].secondary_out_reload != -1)
4374 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4375 = RELOAD_FOR_OPADDR_ADDR;
4378 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4379 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4380 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4381 else
4382 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4385 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4386 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4387 && operand_reloadnum[rld[i].opnum] >= 0
4388 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4389 == RELOAD_OTHER))
4390 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4392 if (goal_alternative_matches[rld[i].opnum] >= 0)
4393 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4396 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4397 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4398 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4400 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4401 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4402 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4403 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4404 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4405 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4406 This is complicated by the fact that a single operand can have more
4407 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4408 choose_reload_regs without affecting code quality, and cases that
4409 actually fail are extremely rare, so it turns out to be better to fix
4410 the problem here by not generating cases that choose_reload_regs will
4411 fail for. */
4412 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4413 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4414 a single operand.
4415 We can reduce the register pressure by exploiting that a
4416 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4417 does not conflict with any of them, if it is only used for the first of
4418 the RELOAD_FOR_X_ADDRESS reloads. */
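/* Roughly, the code below counts the reloads of each *_ADDRESS kind;
if any kind occurs more than once, the corresponding *_ADDR_ADDR
reloads are converted back to that *_ADDRESS type, except for one
that precedes and feeds only the first such reload. */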
4420 int first_op_addr_num = -2;
4421 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4422 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4423 int need_change = 0;
4424 /* We use first_op_addr_num and the contents of the above arrays
4425 first as flags: -2 means no instance encountered, -1 means exactly
4426 one instance encountered.
4427 If more than one instance has been encountered, we store the reload
4428 number of the first reload of the kind in question; reload numbers
4429 are known to be non-negative. */
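/* The pre-increment tests below rely on that encoding: ++(-2) yields -1
(seen once, still negative), while ++(-1) yields 0, so the second
instance of a kind both stores its reload number and sets NEED_CHANGE. */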
4430 for (i = 0; i < noperands; i++)
4431 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4432 for (i = n_reloads - 1; i >= 0; i--)
4434 switch (rld[i].when_needed)
4436 case RELOAD_FOR_OPERAND_ADDRESS:
4437 if (++first_op_addr_num >= 0)
4439 first_op_addr_num = i;
4440 need_change = 1;
4442 break;
4443 case RELOAD_FOR_INPUT_ADDRESS:
4444 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4446 first_inpaddr_num[rld[i].opnum] = i;
4447 need_change = 1;
4449 break;
4450 case RELOAD_FOR_OUTPUT_ADDRESS:
4451 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4453 first_outpaddr_num[rld[i].opnum] = i;
4454 need_change = 1;
4456 break;
4457 default:
4458 break;
4462 if (need_change)
4464 for (i = 0; i < n_reloads; i++)
4466 int first_num;
4467 enum reload_type type;
4469 switch (rld[i].when_needed)
4471 case RELOAD_FOR_OPADDR_ADDR:
4472 first_num = first_op_addr_num;
4473 type = RELOAD_FOR_OPERAND_ADDRESS;
4474 break;
4475 case RELOAD_FOR_INPADDR_ADDRESS:
4476 first_num = first_inpaddr_num[rld[i].opnum];
4477 type = RELOAD_FOR_INPUT_ADDRESS;
4478 break;
4479 case RELOAD_FOR_OUTADDR_ADDRESS:
4480 first_num = first_outpaddr_num[rld[i].opnum];
4481 type = RELOAD_FOR_OUTPUT_ADDRESS;
4482 break;
4483 default:
4484 continue;
4486 if (first_num < 0)
4487 continue;
4488 else if (i > first_num)
4489 rld[i].when_needed = type;
4490 else
4492 /* Check if the only TYPE reload that uses reload I is
4493 reload FIRST_NUM. */
4494 for (j = n_reloads - 1; j > first_num; j--)
4496 if (rld[j].when_needed == type
4497 && (rld[i].secondary_p
4498 ? rld[j].secondary_in_reload == i
4499 : reg_mentioned_p (rld[i].in, rld[j].in)))
4501 rld[i].when_needed = type;
4502 break;
4510 /* See if we have any reloads that are now allowed to be merged
4511 because we've changed when the reload is needed to
4512 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4513 check for the most common cases. */
4515 for (i = 0; i < n_reloads; i++)
4516 if (rld[i].in != 0 && rld[i].out == 0
4517 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4518 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4519 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4520 for (j = 0; j < n_reloads; j++)
4521 if (i != j && rld[j].in != 0 && rld[j].out == 0
4522 && rld[j].when_needed == rld[i].when_needed
4523 && MATCHES (rld[i].in, rld[j].in)
4524 && rld[i].rclass == rld[j].rclass
4525 && !rld[i].nocombine && !rld[j].nocombine
4526 && rld[i].reg_rtx == rld[j].reg_rtx)
4528 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4529 transfer_replacements (i, j);
4530 rld[j].in = 0;
4533 #ifdef HAVE_cc0
4534 /* If we made any reloads for addresses, see if they violate a
4535 "no input reloads" requirement for this insn. But loads that we
4536 do after the insn (such as for output addresses) are fine. */
4537 if (no_input_reloads)
4538 for (i = 0; i < n_reloads; i++)
4539 gcc_assert (rld[i].in == 0
4540 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4541 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4542 #endif
4544 /* Compute reload_mode and reload_nregs. */
4545 for (i = 0; i < n_reloads; i++)
4547 rld[i].mode
4548 = (rld[i].inmode == VOIDmode
4549 || (GET_MODE_SIZE (rld[i].outmode)
4550 > GET_MODE_SIZE (rld[i].inmode)))
4551 ? rld[i].outmode : rld[i].inmode;
4553 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4556 /* Special case a simple move with an input reload and a
4557 destination of a hard reg, if the hard reg is ok, use it. */
4558 for (i = 0; i < n_reloads; i++)
4559 if (rld[i].when_needed == RELOAD_FOR_INPUT
4560 && GET_CODE (PATTERN (insn)) == SET
4561 && REG_P (SET_DEST (PATTERN (insn)))
4562 && (SET_SRC (PATTERN (insn)) == rld[i].in
4563 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4564 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4566 rtx dest = SET_DEST (PATTERN (insn));
4567 unsigned int regno = REGNO (dest);
4569 if (regno < FIRST_PSEUDO_REGISTER
4570 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4571 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4573 int nr = hard_regno_nregs[regno][rld[i].mode];
4574 int ok = 1, nri;
4576 for (nri = 1; nri < nr; nri ++)
4577 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4578 ok = 0;
4580 if (ok)
4581 rld[i].reg_rtx = dest;
4585 return retval;
4588 /* Return true if alternative number ALTNUM in constraint-string
4589 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4590 MEM gives the reference if it didn't need any reloads, otherwise it
4591 is null. */
4593 static bool
4594 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4595 const char *constraint, int altnum)
4597 int c;
4599 /* Skip alternatives before the one requested. */
4600 while (altnum > 0)
4602 while (*constraint++ != ',')
4603 ;
4604 altnum--;
4606 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4607 If one of them is present, this alternative accepts the result of
4608 passing a constant-pool reference through find_reloads_toplev.
4610 The same is true of extra memory constraints if the address
4611 was reloaded into a register. However, the target may elect
4612 to disallow the original constant address, forcing it to be
4613 reloaded into a register instead. */
4614 for (; (c = *constraint) && c != ',' && c != '#';
4615 constraint += CONSTRAINT_LEN (c, constraint))
4617 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4618 return true;
4619 #ifdef EXTRA_CONSTRAINT_STR
4620 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4621 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4622 return true;
4623 #endif
4625 return false;
4628 /* Scan X for memory references and scan the addresses for reloading.
4629 Also checks for references to "constant" regs that we want to eliminate
4630 and replaces them with the values they stand for.
4631 We may alter X destructively if it contains a reference to such.
4632 If X is just a constant reg, we return the equivalent value
4633 instead of X.
4635 IND_LEVELS says how many levels of indirect addressing this machine
4636 supports.
4638 OPNUM and TYPE identify the purpose of the reload.
4640 IS_SET_DEST is true if X is the destination of a SET, which is not
4641 appropriate to be replaced by a constant.
4643 INSN, if nonzero, is the insn in which we do the reload. It is used
4644 to determine if we may generate output reloads, and where to put USEs
4645 for pseudos that we have to replace with stack slots.
4647 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4648 result of find_reloads_address. */
4650 static rtx
4651 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4652 int ind_levels, int is_set_dest, rtx insn,
4653 int *address_reloaded)
4655 RTX_CODE code = GET_CODE (x);
4657 const char *fmt = GET_RTX_FORMAT (code);
4658 int i;
4659 int copied;
4661 if (code == REG)
4663 /* This code is duplicated for speed in find_reloads. */
4664 int regno = REGNO (x);
4665 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4666 x = reg_equiv_constant (regno);
4667 #if 0
4668 /* This creates (subreg (mem...)) which would cause an unnecessary
4669 reload of the mem. */
4670 else if (reg_equiv_mem (regno) != 0)
4671 x = reg_equiv_mem (regno);
4672 #endif
4673 else if (reg_equiv_memory_loc (regno)
4674 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4676 rtx mem = make_memloc (x, regno);
4677 if (reg_equiv_address (regno)
4678 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4680 /* If this is not a toplevel operand, find_reloads doesn't see
4681 this substitution. We have to emit a USE of the pseudo so
4682 that delete_output_reload can see it. */
4683 if (replace_reloads && recog_data.operand[opnum] != x)
4684 /* We mark the USE with QImode so that we recognize it
4685 as one that can be safely deleted at the end of
4686 reload. */
4687 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4688 QImode);
4689 x = mem;
4690 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4691 opnum, type, ind_levels, insn);
4692 if (!rtx_equal_p (x, mem))
4693 push_reg_equiv_alt_mem (regno, x);
4694 if (address_reloaded)
4695 *address_reloaded = i;
4698 return x;
4700 if (code == MEM)
4702 rtx tem = x;
4704 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4705 opnum, type, ind_levels, insn);
4706 if (address_reloaded)
4707 *address_reloaded = i;
4709 return tem;
4712 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4714 /* Check for SUBREG containing a REG that's equivalent to a
4715 constant. If the constant has a known value, truncate it
4716 right now. Similarly if we are extracting a single-word of a
4717 multi-word constant. If the constant is symbolic, allow it
4718 to be substituted normally. push_reload will strip the
4719 subreg later. The constant must not be VOIDmode, because we
4720 will lose the mode of the register (this should never happen
4721 because one of the cases above should handle it). */
4723 int regno = REGNO (SUBREG_REG (x));
4724 rtx tem;
4726 if (regno >= FIRST_PSEUDO_REGISTER
4727 && reg_renumber[regno] < 0
4728 && reg_equiv_constant (regno) != 0)
4730 tem =
4731 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4732 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4733 gcc_assert (tem);
4734 if (CONSTANT_P (tem)
4735 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4737 tem = force_const_mem (GET_MODE (x), tem);
4738 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4739 &XEXP (tem, 0), opnum, type,
4740 ind_levels, insn);
4741 if (address_reloaded)
4742 *address_reloaded = i;
4744 return tem;
4747 /* If the subreg contains a reg that will be converted to a mem,
4748 convert the subreg to a narrower memref now.
4749 Otherwise, we would get (subreg (mem ...) ...),
4750 which would force reload of the mem.
4752 We also need to do this if there is an equivalent MEM that is
4753 not offsettable. In that case, alter_subreg would produce an
4754 invalid address on big-endian machines.
4756 For machines that extend byte loads, we must not reload using
4757 a wider mode if we have a paradoxical SUBREG. find_reloads will
4758 force a reload in that case. So we should not do anything here. */
4760 if (regno >= FIRST_PSEUDO_REGISTER
4761 #ifdef LOAD_EXTEND_OP
4762 && !paradoxical_subreg_p (x)
4763 #endif
4764 && (reg_equiv_address (regno) != 0
4765 || (reg_equiv_mem (regno) != 0
4766 && (! strict_memory_address_addr_space_p
4767 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4768 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4769 || ! offsettable_memref_p (reg_equiv_mem (regno))
4770 || num_not_at_initial_offset))))
4771 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4772 insn, address_reloaded);
4775 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4777 if (fmt[i] == 'e')
4779 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4780 ind_levels, is_set_dest, insn,
4781 address_reloaded);
4782 /* If we have replaced a reg with its equivalent memory loc -
4783 that can still be handled here e.g. if it's in a paradoxical
4784 subreg - we must make the change in a copy, rather than using
4785 a destructive change. This way, find_reloads can still elect
4786 not to do the change. */
4787 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4789 x = shallow_copy_rtx (x);
4790 copied = 1;
4792 XEXP (x, i) = new_part;
4795 return x;
4798 /* Return a mem ref for the memory equivalent of reg REGNO.
4799 This mem ref is not shared with anything. */
4801 static rtx
4802 make_memloc (rtx ad, int regno)
4804 /* We must rerun eliminate_regs, in case the elimination
4805 offsets have changed. */
4806 rtx tem
4807 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4810 /* If TEM might contain a pseudo, we must copy it to avoid
4811 modifying it when we do the substitution for the reload. */
4812 if (rtx_varies_p (tem, 0))
4813 tem = copy_rtx (tem);
4815 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4816 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4818 /* Copy the result if it's still the same as the equivalence, to avoid
4819 modifying it when we do the substitution for the reload. */
4820 if (tem == reg_equiv_memory_loc (regno))
4821 tem = copy_rtx (tem);
4822 return tem;
4825 /* Returns true if AD could be turned into a valid memory reference
4826 to mode MODE in address space AS by reloading the part pointed to
4827 by PART into a register. */
4829 static int
4830 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4831 addr_space_t as, rtx *part)
4833 int retv;
4834 rtx tem = *part;
4835 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4837 *part = reg;
4838 retv = memory_address_addr_space_p (mode, ad, as);
4839 *part = tem;
4841 return retv;
4844 /* Record all reloads needed for handling memory address AD
4845 which appears in *LOC in a memory reference to mode MODE
4846 which itself is found in location *MEMREFLOC.
4847 Note that we take shortcuts assuming that no multi-reg machine mode
4848 occurs as part of an address.
4850 OPNUM and TYPE specify the purpose of this reload.
4852 IND_LEVELS says how many levels of indirect addressing this machine
4853 supports.
4855 INSN, if nonzero, is the insn in which we do the reload. It is used
4856 to determine if we may generate output reloads, and where to put USEs
4857 for pseudos that we have to replace with stack slots.
4859 Value is one if this address is reloaded or replaced as a whole; it is
4860 zero if the top level of this address was not reloaded or replaced, and
4861 it is -1 if it may or may not have been reloaded or replaced.
4863 Note that there is no verification that the address will be valid after
4864 this routine does its work. Instead, we rely on the fact that the address
4865 was valid when reload started. So we need only undo things that reload
4866 could have broken. These are wrong register types, pseudos not allocated
4867 to a hard register, and frame pointer elimination. */
4869 static int
4870 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4871 rtx *loc, int opnum, enum reload_type type,
4872 int ind_levels, rtx insn)
4874 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4875 : ADDR_SPACE_GENERIC;
4876 int regno;
4877 int removed_and = 0;
4878 int op_index;
4879 rtx tem;
4881 /* If the address is a register, see if it is a legitimate address and
4882 reload if not. We first handle the cases where we need not reload
4883 or where we must reload in a non-standard way. */
4885 if (REG_P (ad))
4887 regno = REGNO (ad);
4889 if (reg_equiv_constant (regno) != 0)
4891 find_reloads_address_part (reg_equiv_constant (regno), loc,
4892 base_reg_class (mode, as, MEM, SCRATCH),
4893 GET_MODE (ad), opnum, type, ind_levels);
4894 return 1;
4897 tem = reg_equiv_memory_loc (regno);
4898 if (tem != 0)
4900 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4902 tem = make_memloc (ad, regno);
4903 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4904 XEXP (tem, 0),
4905 MEM_ADDR_SPACE (tem)))
4907 rtx orig = tem;
4909 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4910 &XEXP (tem, 0), opnum,
4911 ADDR_TYPE (type), ind_levels, insn);
4912 if (!rtx_equal_p (tem, orig))
4913 push_reg_equiv_alt_mem (regno, tem);
4915 /* We can avoid a reload if the register's equivalent memory
4916 expression is valid as an indirect memory address.
4917 But not all addresses are valid in a mem used as an indirect
4918 address: only reg or reg+constant. */
4920 if (ind_levels > 0
4921 && strict_memory_address_addr_space_p (mode, tem, as)
4922 && (REG_P (XEXP (tem, 0))
4923 || (GET_CODE (XEXP (tem, 0)) == PLUS
4924 && REG_P (XEXP (XEXP (tem, 0), 0))
4925 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4927 /* If TEM is not the same as what we'll be replacing the
4928 pseudo with after reload, put a USE in front of INSN
4929 in the final reload pass. */
4930 if (replace_reloads
4931 && num_not_at_initial_offset
4932 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4934 *loc = tem;
4935 /* We mark the USE with QImode so that we
4936 recognize it as one that can be safely
4937 deleted at the end of reload. */
4938 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4939 insn), QImode);
4941 /* This doesn't really count as replacing the address
4942 as a whole, since it is still a memory access. */
4944 return 0;
4946 ad = tem;
4950 /* The only remaining case where we can avoid a reload is if this is a
4951 hard register that is valid as a base register and which is not the
4952 subject of a CLOBBER in this insn. */
4954 else if (regno < FIRST_PSEUDO_REGISTER
4955 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4956 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4957 return 0;
4959 /* If we do not have one of the cases above, we must do the reload. */
4960 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4961 base_reg_class (mode, as, MEM, SCRATCH),
4962 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4963 return 1;
4966 if (strict_memory_address_addr_space_p (mode, ad, as))
4968 /* The address appears valid, so reloads are not needed.
4969 But the address may contain an eliminable register.
4970 This can happen because a machine with indirect addressing
4971 may consider a pseudo register by itself a valid address even when
4972 it has failed to get a hard reg.
4973 So do a tree-walk to find and eliminate all such regs. */
4975 /* But first quickly dispose of a common case. */
4976 if (GET_CODE (ad) == PLUS
4977 && CONST_INT_P (XEXP (ad, 1))
4978 && REG_P (XEXP (ad, 0))
4979 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4980 return 0;
4982 subst_reg_equivs_changed = 0;
4983 *loc = subst_reg_equivs (ad, insn);
4985 if (! subst_reg_equivs_changed)
4986 return 0;
4988 /* Check result for validity after substitution. */
4989 if (strict_memory_address_addr_space_p (mode, ad, as))
4990 return 0;
4993 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4996 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4998 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4999 ind_levels, win);
5001 break;
5002 win:
5003 *memrefloc = copy_rtx (*memrefloc);
5004 XEXP (*memrefloc, 0) = ad;
5005 move_replacements (&ad, &XEXP (*memrefloc, 0));
5006 return -1;
5008 while (0);
5009 #endif
5011 /* The address is not valid. We have to figure out why. First see if
5012 we have an outer AND and remove it if so. Then analyze what's inside. */
5014 if (GET_CODE (ad) == AND)
5016 removed_and = 1;
5017 loc = &XEXP (ad, 0);
5018 ad = *loc;
5021 /* One possibility for why the address is invalid is that it is itself
5022 a MEM. This can happen when the frame pointer is being eliminated, a
5023 pseudo is not allocated to a hard register, and the offset between the
5024 frame and stack pointers is not its initial value. In that case the
5025 pseudo will have been replaced by a MEM referring to the
5026 stack pointer. */
5027 if (MEM_P (ad))
5029 /* First ensure that the address in this MEM is valid. Then, unless
5030 indirect addresses are valid, reload the MEM into a register. */
5031 tem = ad;
5032 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5033 opnum, ADDR_TYPE (type),
5034 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5036 /* If tem was changed, then we must create a new memory reference to
5037 hold it and store it back into memrefloc. */
5038 if (tem != ad && memrefloc)
5040 *memrefloc = copy_rtx (*memrefloc);
5041 copy_replacements (tem, XEXP (*memrefloc, 0));
5042 loc = &XEXP (*memrefloc, 0);
5043 if (removed_and)
5044 loc = &XEXP (*loc, 0);
5047 /* Check the same cases as for indirect addresses above, except
5048 that we can allow pseudos and a MEM since they should have been
5049 taken care of above. */
5051 if (ind_levels == 0
5052 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5053 || MEM_P (XEXP (tem, 0))
5054 || ! (REG_P (XEXP (tem, 0))
5055 || (GET_CODE (XEXP (tem, 0)) == PLUS
5056 && REG_P (XEXP (XEXP (tem, 0), 0))
5057 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5059 /* Must use TEM here, not AD, since it is the one that will
5060 have any subexpressions reloaded, if needed. */
5061 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5062 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5063 VOIDmode, 0,
5064 0, opnum, type);
5065 return ! removed_and;
5067 else
5068 return 0;
5071 /* If we have address of a stack slot but it's not valid because the
5072 displacement is too large, compute the sum in a register.
5073 Handle all base registers here, not just fp/ap/sp, because on some
5074 targets (namely SH) we can also get too large displacements from
5075 big-endian corrections. */
5076 else if (GET_CODE (ad) == PLUS
5077 && REG_P (XEXP (ad, 0))
5078 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5079 && CONST_INT_P (XEXP (ad, 1))
5080 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5081 CONST_INT)
5082 /* Similarly, if we were to reload the base register and the
5083 mem+offset address is still invalid, then we want to reload
5084 the whole address, not just the base register. */
5085 || ! maybe_memory_address_addr_space_p
5086 (mode, ad, as, &(XEXP (ad, 0)))))
5089 /* Unshare the MEM rtx so we can safely alter it. */
5090 if (memrefloc)
5092 *memrefloc = copy_rtx (*memrefloc);
5093 loc = &XEXP (*memrefloc, 0);
5094 if (removed_and)
5095 loc = &XEXP (*loc, 0);
5098 if (double_reg_address_ok
5099 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5100 PLUS, CONST_INT))
5102 /* Unshare the sum as well. */
5103 *loc = ad = copy_rtx (ad);
5105 /* Reload the displacement into an index reg.
5106 We assume the frame pointer or arg pointer is a base reg. */
5107 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5108 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5109 type, ind_levels);
5110 return 0;
5112 else
5114 /* If the sum of two regs is not necessarily valid,
5115 reload the sum into a base reg.
5116 That will at least work. */
5117 find_reloads_address_part (ad, loc,
5118 base_reg_class (mode, as, MEM, SCRATCH),
5119 GET_MODE (ad), opnum, type, ind_levels);
5121 return ! removed_and;
5124 /* If we have an indexed stack slot, there are three possible reasons why
5125 it might be invalid: The index might need to be reloaded, the address
5126 might have been made by frame pointer elimination and hence have a
5127 constant out of range, or both reasons might apply.
5129 We can easily check for an index needing reload, but even if that is the
5130 case, we might also have an invalid constant. To avoid making the
5131 conservative assumption and requiring two reloads, we see if this address
5132 is valid when not interpreted strictly. If it is, the only problem is
5133 that the index needs a reload and find_reloads_address_1 will take care
5134 of it.
5136 Handle all base registers here, not just fp/ap/sp, because on some
5137 targets (namely SPARC) we can also get invalid addresses from preventive
5138 subreg big-endian corrections made by find_reloads_toplev. We
5139 can also get expressions involving LO_SUM (rather than PLUS) from
5140 find_reloads_subreg_address.
5142 If we decide to do something, it must be that `double_reg_address_ok'
5143 is true. We generate a reload of the base register + constant and
5144 rework the sum so that the reload register will be added to the index.
5145 This is safe because we know the address isn't shared.
5147 We check for the base register as both the first and second operand of
5148 the innermost PLUS and/or LO_SUM. */
5150 for (op_index = 0; op_index < 2; ++op_index)
5152 rtx operand, addend;
5153 enum rtx_code inner_code;
5155 if (GET_CODE (ad) != PLUS)
5156 continue;
5158 inner_code = GET_CODE (XEXP (ad, 0));
5159 if (!(GET_CODE (ad) == PLUS
5160 && CONST_INT_P (XEXP (ad, 1))
5161 && (inner_code == PLUS || inner_code == LO_SUM)))
5162 continue;
5164 operand = XEXP (XEXP (ad, 0), op_index);
5165 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5166 continue;
5168 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5170 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5171 GET_CODE (addend))
5172 || operand == frame_pointer_rtx
5173 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5174 || operand == hard_frame_pointer_rtx
5175 #endif
5176 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5177 || operand == arg_pointer_rtx
5178 #endif
5179 || operand == stack_pointer_rtx)
5180 && ! maybe_memory_address_addr_space_p
5181 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5183 rtx offset_reg;
5184 enum reg_class cls;
5186 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5188 /* Form the adjusted address. */
5189 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5190 ad = gen_rtx_PLUS (GET_MODE (ad),
5191 op_index == 0 ? offset_reg : addend,
5192 op_index == 0 ? addend : offset_reg);
5193 else
5194 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5195 op_index == 0 ? offset_reg : addend,
5196 op_index == 0 ? addend : offset_reg);
5197 *loc = ad;
5199 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5200 find_reloads_address_part (XEXP (ad, op_index),
5201 &XEXP (ad, op_index), cls,
5202 GET_MODE (ad), opnum, type, ind_levels);
5203 find_reloads_address_1 (mode, as,
5204 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5205 GET_CODE (XEXP (ad, op_index)),
5206 &XEXP (ad, 1 - op_index), opnum,
5207 type, 0, insn);
5209 return 0;
5213 /* See if address becomes valid when an eliminable register
5214 in a sum is replaced. */
5216 tem = ad;
5217 if (GET_CODE (ad) == PLUS)
5218 tem = subst_indexed_address (ad);
5219 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5221 /* Ok, we win that way. Replace any additional eliminable
5222 registers. */
5224 subst_reg_equivs_changed = 0;
5225 tem = subst_reg_equivs (tem, insn);
5227 /* Make sure that didn't make the address invalid again. */
5229 if (! subst_reg_equivs_changed
5230 || strict_memory_address_addr_space_p (mode, tem, as))
5232 *loc = tem;
5233 return 0;
5237 /* If constants aren't valid addresses, reload the constant address
5238 into a register. */
5239 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5241 enum machine_mode address_mode = GET_MODE (ad);
5242 if (address_mode == VOIDmode)
5243 address_mode = targetm.addr_space.address_mode (as);
5245 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5246 Unshare it so we can safely alter it. */
5247 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5248 && CONSTANT_POOL_ADDRESS_P (ad))
5250 *memrefloc = copy_rtx (*memrefloc);
5251 loc = &XEXP (*memrefloc, 0);
5252 if (removed_and)
5253 loc = &XEXP (*loc, 0);
5256 find_reloads_address_part (ad, loc,
5257 base_reg_class (mode, as, MEM, SCRATCH),
5258 address_mode, opnum, type, ind_levels);
5259 return ! removed_and;
5262 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5263 opnum, type, ind_levels, insn);
5266 /* Find all pseudo regs appearing in AD
5267 that are eliminable in favor of equivalent values
5268 and do not have hard regs; replace them by their equivalents.
5269 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5270 front of it for pseudos that we have to replace with stack slots. */
5272 static rtx
5273 subst_reg_equivs (rtx ad, rtx insn)
5275 RTX_CODE code = GET_CODE (ad);
5276 int i;
5277 const char *fmt;
5279 switch (code)
5281 case HIGH:
5282 case CONST_INT:
5283 case CONST:
5284 case CONST_DOUBLE:
5285 case CONST_FIXED:
5286 case CONST_VECTOR:
5287 case SYMBOL_REF:
5288 case LABEL_REF:
5289 case PC:
5290 case CC0:
5291 return ad;
5293 case REG:
5295 int regno = REGNO (ad);
5297 if (reg_equiv_constant (regno) != 0)
5299 subst_reg_equivs_changed = 1;
5300 return reg_equiv_constant (regno);
5302 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5304 rtx mem = make_memloc (ad, regno);
5305 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5307 subst_reg_equivs_changed = 1;
5308 /* We mark the USE with QImode so that we recognize it
5309 as one that can be safely deleted at the end of
5310 reload. */
5311 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5312 QImode);
5313 return mem;
5317 return ad;
5319 case PLUS:
5320 /* Quickly dispose of a common case. */
5321 if (XEXP (ad, 0) == frame_pointer_rtx
5322 && CONST_INT_P (XEXP (ad, 1)))
5323 return ad;
5324 break;
5326 default:
5327 break;
5330 fmt = GET_RTX_FORMAT (code);
5331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5332 if (fmt[i] == 'e')
5333 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5334 return ad;
5337 /* Compute the sum of X and Y, making canonicalizations assumed in an
5338 address, namely: sum constant integers, surround the sum of two
5339 constants with a CONST, put the constant as the second operand, and
5340 group the constant on the outermost sum.
5342 This routine assumes both inputs are already in canonical form. */
5345 form_sum (enum machine_mode mode, rtx x, rtx y)
5347 rtx tem;
5349 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5350 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5352 if (CONST_INT_P (x))
5353 return plus_constant (y, INTVAL (x));
5354 else if (CONST_INT_P (y))
5355 return plus_constant (x, INTVAL (y));
5356 else if (CONSTANT_P (x))
5357 tem = x, x = y, y = tem;
5359 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5360 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5362 /* Note that if the operands of Y are specified in the opposite
5363 order in the recursive calls below, infinite recursion will occur. */
5364 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5365 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5367 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5368 constant will have been placed second. */
5369 if (CONSTANT_P (x) && CONSTANT_P (y))
5371 if (GET_CODE (x) == CONST)
5372 x = XEXP (x, 0);
5373 if (GET_CODE (y) == CONST)
5374 y = XEXP (y, 0);
5376 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5379 return gen_rtx_PLUS (mode, x, y);
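/* Illustrative examples of the canonicalizations above (register number and
   constants are made up):

     form_sum (Pmode, (plus (reg 65) (const_int 4)), (const_int 8))
       => (plus (reg 65) (const_int 12))

     form_sum (Pmode, (symbol_ref "x"), (const_int 16))
       => (const (plus (symbol_ref "x") (const_int 16)))  */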
5382 /* If ADDR is a sum containing a pseudo register that should be
5383 replaced with a constant (from reg_equiv_constant),
5384 return the result of doing so, and also apply the associative
5385 law so that the result is more likely to be a valid address.
5386 (But it is not guaranteed to be one.)
5388 Note that at most one register is replaced, even if more are
5389 replaceable. Also, we try to put the result into a canonical form
5390 so it is more likely to be a valid address.
5392 In all other cases, return ADDR. */
5394 static rtx
5395 subst_indexed_address (rtx addr)
5397 rtx op0 = 0, op1 = 0, op2 = 0;
5398 rtx tem;
5399 int regno;
5401 if (GET_CODE (addr) == PLUS)
5403 /* Try to find a register to replace. */
5404 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5405 if (REG_P (op0)
5406 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5407 && reg_renumber[regno] < 0
5408 && reg_equiv_constant (regno) != 0)
5409 op0 = reg_equiv_constant (regno);
5410 else if (REG_P (op1)
5411 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5412 && reg_renumber[regno] < 0
5413 && reg_equiv_constant (regno) != 0)
5414 op1 = reg_equiv_constant (regno);
5415 else if (GET_CODE (op0) == PLUS
5416 && (tem = subst_indexed_address (op0)) != op0)
5417 op0 = tem;
5418 else if (GET_CODE (op1) == PLUS
5419 && (tem = subst_indexed_address (op1)) != op1)
5420 op1 = tem;
5421 else
5422 return addr;
5424 /* Pick out up to three things to add. */
5425 if (GET_CODE (op1) == PLUS)
5426 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5427 else if (GET_CODE (op0) == PLUS)
5428 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5430 /* Compute the sum. */
5431 if (op2 != 0)
5432 op1 = form_sum (GET_MODE (addr), op1, op2);
5433 if (op1 != 0)
5434 op0 = form_sum (GET_MODE (addr), op0, op1);
5436 return op0;
5438 return addr;
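/* Illustrative example (pseudo numbers and the equivalence are made up): if
   pseudo 70 received no hard register and reg_equiv_constant (70) is
   (const_int 8), then

     subst_indexed_address ((plus (plus (reg 70) (reg 71)) (const_int 4)))

   substitutes the constant and re-associates, yielding

     (plus (reg 71) (const_int 12)).  */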
5441 /* Update the REG_INC notes for an insn. It updates all REG_INC
5442 notes for the instruction which refer to REGNO so that they refer
5443 to the reload number.
5445 INSN is the insn for which any REG_INC notes need updating.
5447 REGNO is the register number which has been reloaded.
5449 RELOADNUM is the reload number. */
5451 static void
5452 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5453 int reloadnum ATTRIBUTE_UNUSED)
5455 #ifdef AUTO_INC_DEC
5456 rtx link;
5458 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5459 if (REG_NOTE_KIND (link) == REG_INC
5460 && (int) REGNO (XEXP (link, 0)) == regno)
5461 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5462 #endif
5465 /* Record the pseudo registers we must reload into hard registers in a
5466 subexpression of a would-be memory address, X referring to a value
5467 in mode MODE. (This function is not called if the address we find
5468 is strictly valid.)
5470 CONTEXT = 1 means we are considering regs as index regs,
5471 = 0 means we are considering them as base regs.
5472 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5473 or an autoinc code.
5474 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5475 is the code of the index part of the address. Otherwise, pass SCRATCH
5476 for this argument.
5477 OPNUM and TYPE specify the purpose of any reloads made.
5479 IND_LEVELS says how many levels of indirect addressing are
5480 supported at this point in the address.
5482 INSN, if nonzero, is the insn in which we do the reload. It is used
5483 to determine if we may generate output reloads.
5485 We return nonzero if X, as a whole, is reloaded or replaced. */
5487 /* Note that we take shortcuts assuming that no multi-reg machine mode
5488 occurs as part of an address.
5489 Also, this is not fully machine-customizable; it works for machines
5490 such as VAXen and 68000's and 32000's, but other possible machines
5491 could have addressing modes that this does not handle right.
5492 If you add push_reload calls here, you need to make sure gen_reload
5493 handles those cases gracefully. */
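/* Illustrative example (register numbers made up): for the address
   (plus (reg 65) (mult (reg 66) (const_int 4))), the PLUS case below
   recurses on (reg 65) with CONTEXT == 0 (a base register, OUTER_CODE PLUS
   and INDEX_CODE MULT) and on the MULT with CONTEXT == 1 (an index), so the
   two registers are validated against base_reg_class and INDEX_REG_CLASS
   respectively.  */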
5495 static int
5496 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5497 rtx x, int context,
5498 enum rtx_code outer_code, enum rtx_code index_code,
5499 rtx *loc, int opnum, enum reload_type type,
5500 int ind_levels, rtx insn)
5502 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5503 ((CONTEXT) == 0 \
5504 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5505 : REGNO_OK_FOR_INDEX_P (REGNO))
5507 enum reg_class context_reg_class;
5508 RTX_CODE code = GET_CODE (x);
5510 if (context == 1)
5511 context_reg_class = INDEX_REG_CLASS;
5512 else
5513 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5515 switch (code)
5517 case PLUS:
5519 rtx orig_op0 = XEXP (x, 0);
5520 rtx orig_op1 = XEXP (x, 1);
5521 RTX_CODE code0 = GET_CODE (orig_op0);
5522 RTX_CODE code1 = GET_CODE (orig_op1);
5523 rtx op0 = orig_op0;
5524 rtx op1 = orig_op1;
5526 if (GET_CODE (op0) == SUBREG)
5528 op0 = SUBREG_REG (op0);
5529 code0 = GET_CODE (op0);
5530 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5531 op0 = gen_rtx_REG (word_mode,
5532 (REGNO (op0) +
5533 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5534 GET_MODE (SUBREG_REG (orig_op0)),
5535 SUBREG_BYTE (orig_op0),
5536 GET_MODE (orig_op0))));
5539 if (GET_CODE (op1) == SUBREG)
5541 op1 = SUBREG_REG (op1);
5542 code1 = GET_CODE (op1);
5543 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5544 /* ??? Why is this given op1's mode and above for
5545 ??? op0 SUBREGs we use word_mode? */
5546 op1 = gen_rtx_REG (GET_MODE (op1),
5547 (REGNO (op1) +
5548 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5549 GET_MODE (SUBREG_REG (orig_op1)),
5550 SUBREG_BYTE (orig_op1),
5551 GET_MODE (orig_op1))));
5553 /* A PLUS in the index register may be created only as a result of
5554 register rematerialization for expressions like &localvar*4. Reload it.
5555 It may be possible to combine the displacement on the outer level,
5556 but it is probably not worthwhile to do so. */
5557 if (context == 1)
5559 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5560 opnum, ADDR_TYPE (type), ind_levels, insn);
5561 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5562 context_reg_class,
5563 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5564 return 1;
5567 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5568 || code0 == ZERO_EXTEND || code1 == MEM)
5570 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5571 &XEXP (x, 0), opnum, type, ind_levels,
5572 insn);
5573 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5578 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5579 || code1 == ZERO_EXTEND || code0 == MEM)
5581 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5582 &XEXP (x, 0), opnum, type, ind_levels,
5583 insn);
5584 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5585 &XEXP (x, 1), opnum, type, ind_levels,
5586 insn);
5589 else if (code0 == CONST_INT || code0 == CONST
5590 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5591 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5592 &XEXP (x, 1), opnum, type, ind_levels,
5593 insn);
5595 else if (code1 == CONST_INT || code1 == CONST
5596 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5597 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5598 &XEXP (x, 0), opnum, type, ind_levels,
5599 insn);
5601 else if (code0 == REG && code1 == REG)
5603 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5604 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5605 return 0;
5606 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5607 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5608 return 0;
5609 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5610 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5611 &XEXP (x, 1), opnum, type, ind_levels,
5612 insn);
5613 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5614 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5615 &XEXP (x, 0), opnum, type, ind_levels,
5616 insn);
5617 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5618 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5619 &XEXP (x, 0), opnum, type, ind_levels,
5620 insn);
5621 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5622 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5623 &XEXP (x, 1), opnum, type, ind_levels,
5624 insn);
5625 else
5627 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5636 else if (code0 == REG)
5638 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5642 &XEXP (x, 1), opnum, type, ind_levels,
5643 insn);
5646 else if (code1 == REG)
5648 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5649 &XEXP (x, 1), opnum, type, ind_levels,
5650 insn);
5651 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5652 &XEXP (x, 0), opnum, type, ind_levels,
5653 insn);
5657 return 0;
5659 case POST_MODIFY:
5660 case PRE_MODIFY:
5662 rtx op0 = XEXP (x, 0);
5663 rtx op1 = XEXP (x, 1);
5664 enum rtx_code index_code;
5665 int regno;
5666 int reloadnum;
5668 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5669 return 0;
5671 /* Currently, we only support {PRE,POST}_MODIFY constructs
5672 where a base register is {inc,dec}remented by the contents
5673 of another register or by a constant value. Thus, these
5674 operands must match. */
5675 gcc_assert (op0 == XEXP (op1, 0));
5677 /* Require index register (or constant). Let's just handle the
5678 register case in the meantime... If the target allows
5679 auto-modify by a constant then we could try replacing a pseudo
5680 register with its equivalent constant where applicable.
5682 We also handle the case where the register was eliminated
5683 resulting in a PLUS subexpression.
5685 If we later decide to reload the whole PRE_MODIFY or
5686 POST_MODIFY, inc_for_reload might clobber the reload register
5687 before reading the index. The index register might therefore
5688 need to live longer than a TYPE reload normally would, so be
5689 conservative and class it as RELOAD_OTHER. */
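/* For example (illustrative register numbers):
   (post_modify (reg 3) (plus (reg 3) (reg 4))) adds the contents of
   register 4 to base register 3 after the enclosing MEM has been used;
   both occurrences of (reg 3) must be the same rtx.  */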
5690 if ((REG_P (XEXP (op1, 1))
5691 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5692 || GET_CODE (XEXP (op1, 1)) == PLUS)
5693 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5694 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5695 ind_levels, insn);
5697 gcc_assert (REG_P (XEXP (op1, 0)));
5699 regno = REGNO (XEXP (op1, 0));
5700 index_code = GET_CODE (XEXP (op1, 1));
5702 /* A register that is incremented cannot be constant! */
5703 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5704 || reg_equiv_constant (regno) == 0);
5706 /* Handle a register that is equivalent to a memory location
5707 which cannot be addressed directly. */
5708 if (reg_equiv_memory_loc (regno) != 0
5709 && (reg_equiv_address (regno) != 0
5710 || num_not_at_initial_offset))
5712 rtx tem = make_memloc (XEXP (x, 0), regno);
5714 if (reg_equiv_address (regno)
5715 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5717 rtx orig = tem;
5719 /* First reload the memory location's address.
5720 We can't use ADDR_TYPE (type) here, because we need to
5721 write back the value after reading it, hence we actually
5722 need two registers. */
5723 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5724 &XEXP (tem, 0), opnum,
5725 RELOAD_OTHER,
5726 ind_levels, insn);
5728 if (!rtx_equal_p (tem, orig))
5729 push_reg_equiv_alt_mem (regno, tem);
5731 /* Then reload the memory location into a base
5732 register. */
5733 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5734 &XEXP (op1, 0),
5735 base_reg_class (mode, as,
5736 code, index_code),
5737 GET_MODE (x), GET_MODE (x), 0,
5738 0, opnum, RELOAD_OTHER);
5740 update_auto_inc_notes (this_insn, regno, reloadnum);
5741 return 0;
5745 if (reg_renumber[regno] >= 0)
5746 regno = reg_renumber[regno];
5748 /* We require a base register here... */
5749 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5751 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5752 &XEXP (op1, 0), &XEXP (x, 0),
5753 base_reg_class (mode, as,
5754 code, index_code),
5755 GET_MODE (x), GET_MODE (x), 0, 0,
5756 opnum, RELOAD_OTHER);
5758 update_auto_inc_notes (this_insn, regno, reloadnum);
5759 return 0;
5762 return 0;
5764 case POST_INC:
5765 case POST_DEC:
5766 case PRE_INC:
5767 case PRE_DEC:
5768 if (REG_P (XEXP (x, 0)))
5770 int regno = REGNO (XEXP (x, 0));
5771 int value = 0;
5772 rtx x_orig = x;
5774 /* A register that is incremented cannot be constant! */
5775 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5776 || reg_equiv_constant (regno) == 0);
5778 /* Handle a register that is equivalent to a memory location
5779 which cannot be addressed directly. */
5780 if (reg_equiv_memory_loc (regno) != 0
5781 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5783 rtx tem = make_memloc (XEXP (x, 0), regno);
5784 if (reg_equiv_address (regno)
5785 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5787 rtx orig = tem;
5789 /* First reload the memory location's address.
5790 We can't use ADDR_TYPE (type) here, because we need to
5791 write back the value after reading it, hence we actually
5792 need two registers. */
5793 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5794 &XEXP (tem, 0), opnum, type,
5795 ind_levels, insn);
5796 if (!rtx_equal_p (tem, orig))
5797 push_reg_equiv_alt_mem (regno, tem);
5798 /* Put this inside a new increment-expression. */
5799 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5800 /* Proceed to reload that, as if it contained a register. */
5804 /* If we have a hard register that is ok in this incdec context,
5805 don't make a reload. If the register isn't nice enough for
5806 autoincdec, we can reload it. But if an autoincrement of a
5807 register that we have verified here as acceptable is still not
5808 "valid" in the outer context, it must be that no autoincrement
5809 is "valid" at all. If that is true and something made an
5810 autoincrement anyway, this must be a special context where one
5811 is allowed. (For example, a "push" instruction.)
5812 We can't improve this address, so leave it alone. */
5814 /* Otherwise, reload the autoincrement into a suitable hard reg
5815 and record how much to increment by. */
5817 if (reg_renumber[regno] >= 0)
5818 regno = reg_renumber[regno];
5819 if (regno >= FIRST_PSEUDO_REGISTER
5820 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5821 index_code))
5823 int reloadnum;
5825 /* If we can output the register afterwards, do so; this
5826 saves the extra update.
5827 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5828 CALL_INSN - and it does not set CC0.
5829 But don't do this if we cannot directly address the
5830 memory location, since this will make it harder to
5831 reuse address reloads, and increases register pressure.
5832 Also don't do this if we can probably update x directly. */
5833 rtx equiv = (MEM_P (XEXP (x, 0))
5834 ? XEXP (x, 0)
5835 : reg_equiv_mem (regno));
5836 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5837 if (insn && NONJUMP_INSN_P (insn) && equiv
5838 && memory_operand (equiv, GET_MODE (equiv))
5839 #ifdef HAVE_cc0
5840 && ! sets_cc0_p (PATTERN (insn))
5841 #endif
5842 && ! (icode != CODE_FOR_nothing
5843 && insn_operand_matches (icode, 0, equiv)
5844 && insn_operand_matches (icode, 1, equiv)))
5846 /* We use the original pseudo for loc, so that
5847 emit_reload_insns() knows which pseudo this
5848 reload refers to and updates the pseudo rtx, not
5849 its equivalent memory location, as well as the
5850 corresponding entry in reg_last_reload_reg. */
5851 loc = &XEXP (x_orig, 0);
5852 x = XEXP (x, 0);
5853 reloadnum
5854 = push_reload (x, x, loc, loc,
5855 context_reg_class,
5856 GET_MODE (x), GET_MODE (x), 0, 0,
5857 opnum, RELOAD_OTHER);
5859 else
5861 reloadnum
5862 = push_reload (x, x, loc, (rtx*) 0,
5863 context_reg_class,
5864 GET_MODE (x), GET_MODE (x), 0, 0,
5865 opnum, type);
5866 rld[reloadnum].inc
5867 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5869 value = 1;
5872 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5873 reloadnum);
5875 return value;
5877 return 0;
5879 case TRUNCATE:
5880 case SIGN_EXTEND:
5881 case ZERO_EXTEND:
5882 /* Look for parts to reload in the inner expression and reload them
5883 too, in addition to this operation. Reloading all inner parts in
5884 addition to this one shouldn't be necessary, but at this point,
5885 we don't know if we can possibly omit any part that *can* be
5886 reloaded. Targets that are better off reloading just either part
5887 (or perhaps even a different part of an outer expression), should
5888 define LEGITIMIZE_RELOAD_ADDRESS. */
5889 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5890 context, code, SCRATCH, &XEXP (x, 0), opnum,
5891 type, ind_levels, insn);
5892 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5893 context_reg_class,
5894 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5895 return 1;
5897 case MEM:
5898 /* This is probably the result of a substitution, by eliminate_regs, of
5899 an equivalent address for a pseudo that was not allocated to a hard
5900 register. Verify that the specified address is valid and reload it
5901 into a register.
5903 Since we know we are going to reload this item, don't decrement for
5904 the indirection level.
5906 Note that this is actually conservative: it would be slightly more
5907 efficient to use the value of SPILL_INDIRECT_LEVELS from
5908 reload1.c here. */
5910 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5911 opnum, ADDR_TYPE (type), ind_levels, insn);
5912 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5913 context_reg_class,
5914 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5915 return 1;
5917 case REG:
5919 int regno = REGNO (x);
5921 if (reg_equiv_constant (regno) != 0)
5923 find_reloads_address_part (reg_equiv_constant (regno), loc,
5924 context_reg_class,
5925 GET_MODE (x), opnum, type, ind_levels);
5926 return 1;
5929 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5930 that feeds this insn. */
5931 if (reg_equiv_mem (regno) != 0)
5933 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5934 context_reg_class,
5935 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5936 return 1;
5938 #endif
5940 if (reg_equiv_memory_loc (regno)
5941 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5943 rtx tem = make_memloc (x, regno);
5944 if (reg_equiv_address (regno) != 0
5945 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5947 x = tem;
5948 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5949 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5950 ind_levels, insn);
5951 if (!rtx_equal_p (x, tem))
5952 push_reg_equiv_alt_mem (regno, x);
5956 if (reg_renumber[regno] >= 0)
5957 regno = reg_renumber[regno];
5959 if (regno >= FIRST_PSEUDO_REGISTER
5960 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5961 index_code))
5963 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5964 context_reg_class,
5965 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5966 return 1;
5969 /* If a register appearing in an address is the subject of a CLOBBER
5970 in this insn, reload it into some other register to be safe.
5971 The CLOBBER is supposed to make the register unavailable
5972 from before this insn to after it. */
5973 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5975 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5976 context_reg_class,
5977 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5978 return 1;
5981 return 0;
5983 case SUBREG:
5984 if (REG_P (SUBREG_REG (x)))
5986 /* If this is a SUBREG of a hard register and the resulting register
5987 is of the wrong class, reload the whole SUBREG. This avoids
5988 needless copies if SUBREG_REG is multi-word. */
5989 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5991 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5993 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5994 index_code))
5996 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5997 context_reg_class,
5998 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5999 return 1;
6002 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6003 is larger than the class size, then reload the whole SUBREG. */
6004 else
6006 enum reg_class rclass = context_reg_class;
6007 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6008 > reg_class_size[(int) rclass])
6010 x = find_reloads_subreg_address (x, 0, opnum,
6011 ADDR_TYPE (type),
6012 ind_levels, insn, NULL);
6013 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6014 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6015 return 1;
6019 break;
6021 default:
6022 break;
6026 const char *fmt = GET_RTX_FORMAT (code);
6027 int i;
6029 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6031 if (fmt[i] == 'e')
6032 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6033 we get here. */
6034 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6035 code, SCRATCH, &XEXP (x, i),
6036 opnum, type, ind_levels, insn);
6040 #undef REG_OK_FOR_CONTEXT
6041 return 0;
6044 /* X, which is found at *LOC, is a part of an address that needs to be
6045 reloaded into a register of class RCLASS. If X is a constant, or if
6046 X is a PLUS that contains a constant, check that the constant is a
6047 legitimate operand and that we are supposed to be able to load
6048 it into the register.
6050 If not, force the constant into memory and reload the MEM instead.
6052 MODE is the mode to use, in case X is an integer constant.
6054 OPNUM and TYPE describe the purpose of any reloads made.
6056 IND_LEVELS says how many levels of indirect addressing this machine
6057 supports. */
6059 static void
6060 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6061 enum machine_mode mode, int opnum,
6062 enum reload_type type, int ind_levels)
6064 if (CONSTANT_P (x)
6065 && (!targetm.legitimate_constant_p (mode, x)
6066 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6068 x = force_const_mem (mode, x);
6069 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6070 opnum, type, ind_levels, 0);
6073 else if (GET_CODE (x) == PLUS
6074 && CONSTANT_P (XEXP (x, 1))
6075 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6076 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6077 == NO_REGS))
6079 rtx tem;
6081 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6082 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6083 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6084 opnum, type, ind_levels, 0);
6087 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6088 mode, VOIDmode, 0, 0, opnum, type);
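/* Illustrative example (made-up target restriction): if (symbol_ref "x") is
   not a legitimate constant for the target, the code above places it in the
   constant pool, so the address part becomes a MEM of a constant-pool
   SYMBOL_REF; that MEM's own address is processed by find_reloads_address
   and the result is then reloaded into RCLASS by push_reload.  */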
6091 /* X, a subreg of a pseudo, is a part of an address that needs to be
6092 reloaded.
6094 If the pseudo is equivalent to a memory location that cannot be directly
6095 addressed, make the necessary address reloads.
6097 If address reloads have been necessary, or if the address is changed
6098 by register elimination, return the rtx of the memory location;
6099 otherwise, return X.
6101 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6102 memory location.
6104 OPNUM and TYPE identify the purpose of the reload.
6106 IND_LEVELS says how many levels of indirect addressing are
6107 supported at this point in the address.
6109 INSN, if nonzero, is the insn in which we do the reload. It is used
6110 to determine where to put USEs for pseudos that we have to replace with
6111 stack slots. */
6113 static rtx
6114 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6115 enum reload_type type, int ind_levels, rtx insn,
6116 int *address_reloaded)
6118 int regno = REGNO (SUBREG_REG (x));
6119 int reloaded = 0;
6121 if (reg_equiv_memory_loc (regno))
6123 /* If the address is not directly addressable, or if the address is not
6124 offsettable, then it must be replaced. */
6125 if (! force_replace
6126 && (reg_equiv_address (regno)
6127 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6128 force_replace = 1;
6130 if (force_replace || num_not_at_initial_offset)
6132 rtx tem = make_memloc (SUBREG_REG (x), regno);
6134 /* If the address changes because of register elimination, then
6135 it must be replaced. */
6136 if (force_replace
6137 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6139 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6140 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6141 int offset;
6142 rtx orig = tem;
6144 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6145 hold the correct (negative) byte offset. */
6146 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6147 offset = inner_size - outer_size;
6148 else
6149 offset = SUBREG_BYTE (x);
6151 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6152 PUT_MODE (tem, GET_MODE (x));
6153 if (MEM_OFFSET_KNOWN_P (tem))
6154 set_mem_offset (tem, MEM_OFFSET (tem) + offset);
6155 if (MEM_SIZE_KNOWN_P (tem)
6156 && MEM_SIZE (tem) != (HOST_WIDE_INT) outer_size)
6157 set_mem_size (tem, outer_size);
6159 /* If this was a paradoxical subreg that we replaced, the
6160 resulting memory must be sufficiently aligned to allow
6161 us to widen the mode of the memory. */
6162 if (outer_size > inner_size)
6164 rtx base;
6166 base = XEXP (tem, 0);
6167 if (GET_CODE (base) == PLUS)
6169 if (CONST_INT_P (XEXP (base, 1))
6170 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6171 return x;
6172 base = XEXP (base, 0);
6174 if (!REG_P (base)
6175 || (REGNO_POINTER_ALIGN (REGNO (base))
6176 < outer_size * BITS_PER_UNIT))
6177 return x;
6180 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6181 XEXP (tem, 0), &XEXP (tem, 0),
6182 opnum, type, ind_levels, insn);
6183 /* ??? Do we need to handle nonzero offsets somehow? */
6184 if (!offset && !rtx_equal_p (tem, orig))
6185 push_reg_equiv_alt_mem (regno, tem);
6187 /* For some processors an address may be valid in the
6188 original mode but not in a smaller mode. For
6189 example, ARM accepts a scaled index register in
6190 SImode but not in HImode. Note that this is only
6191 a problem if the address in reg_equiv_mem is already
6192 invalid in the new mode; other cases would be fixed
6193 by find_reloads_address as usual.
6195 ??? We attempt to handle such cases here by doing an
6196 additional reload of the full address after the
6197 usual processing by find_reloads_address. Note that
6198 this may not work in the general case, but it seems
6199 to cover the cases where this situation currently
6200 occurs. A more general fix might be to reload the
6201 *value* instead of the address, but this would not
6202 be expected by the callers of this routine as-is.
6204 If find_reloads_address already completely replaced
6205 the address, there is nothing further to do. */
6206 if (reloaded == 0
6207 && reg_equiv_mem (regno) != 0
6208 && !strict_memory_address_addr_space_p
6209 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6210 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6212 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6213 base_reg_class (GET_MODE (tem),
6214 MEM_ADDR_SPACE (tem),
6215 MEM, SCRATCH),
6216 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6217 opnum, type);
6218 reloaded = 1;
6220 /* If this is not a toplevel operand, find_reloads doesn't see
6221 this substitution. We have to emit a USE of the pseudo so
6222 that delete_output_reload can see it. */
6223 if (replace_reloads && recog_data.operand[opnum] != x)
6224 /* We mark the USE with QImode so that we recognize it
6225 as one that can be safely deleted at the end of
6226 reload. */
6227 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6228 SUBREG_REG (x)),
6229 insn), QImode);
6230 x = tem;
6234 if (address_reloaded)
6235 *address_reloaded = reloaded;
6237 return x;
6240 /* Substitute into the current INSN the registers into which we have reloaded
6241 the things that need reloading. The array `replacements'
6242 contains the locations of all pointers that must be changed
6243 and says what to replace them with.
6245 Each recorded location within INSN is rewritten in place. */
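/* Illustrative example (reload number and hard register made up): if
   find_reloads recorded that the location holding (reg:SI 70) inside a
   (mem:SI ...) is to be replaced by reload 2, and reload 2 was given hard
   register 9, then the loop below stores (reg:SI 9) into that location.  */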
6247 void
6248 subst_reloads (rtx insn)
6250 int i;
6252 for (i = 0; i < n_replacements; i++)
6254 struct replacement *r = &replacements[i];
6255 rtx reloadreg = rld[r->what].reg_rtx;
6256 if (reloadreg)
6258 #ifdef DEBUG_RELOAD
6259 /* This checking takes a very long time on some platforms
6260 causing the gcc.c-torture/compile/limits-fnargs.c test
6261 to time out during testing. See PR 31850.
6263 Internal consistency test. Check that we don't modify
6264 anything in the equivalence arrays. Whenever something from
6265 those arrays needs to be reloaded, it must be unshared before
6266 being substituted into; the equivalence must not be modified.
6267 Otherwise, if the equivalence is used after that, it will
6268 have been modified, and the thing substituted (probably a
6269 register) is likely overwritten and not a usable equivalence. */
6270 int check_regno;
6272 for (check_regno = 0; check_regno < max_regno; check_regno++)
6274 #define CHECK_MODF(ARRAY) \
6275 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6276 || !loc_mentioned_in_p (r->where, \
6277 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6279 CHECK_MODF (equiv_constant);
6280 CHECK_MODF (equiv_memory_loc);
6281 CHECK_MODF (equiv_address);
6282 CHECK_MODF (equiv_mem);
6283 #undef CHECK_MODF
6285 #endif /* DEBUG_RELOAD */
6287 /* If we're replacing a LABEL_REF with a register, there must
6288 already be an indication (to e.g. flow) of which label this
6289 register refers to. */
6290 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6291 || !JUMP_P (insn)
6292 || find_reg_note (insn,
6293 REG_LABEL_OPERAND,
6294 XEXP (*r->where, 0))
6295 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6297 /* Encapsulate RELOADREG so its machine mode matches what
6298 used to be there. Note that gen_lowpart_common will
6299 do the wrong thing if RELOADREG is multi-word. RELOADREG
6300 will always be a REG here. */
6301 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6302 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6304 *r->where = reloadreg;
6306 /* If reload got no reg and isn't optional, something's wrong. */
6307 else
6308 gcc_assert (rld[r->what].optional);
6312 /* Make a copy of any replacements being done into X and move those
6313 copies to locations in Y, a copy of X. */
6315 void
6316 copy_replacements (rtx x, rtx y)
6318 copy_replacements_1 (&x, &y, n_replacements);
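/* Illustrative sketch: the unsharing pattern used by find_reloads_address
   above -- when a shared MEM is copied, the replacements recorded against
   the old rtx must be duplicated so they also apply to the copy.  */
#if 0
  *memrefloc = copy_rtx (*memrefloc);
  copy_replacements (tem, XEXP (*memrefloc, 0));
#endif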
6321 static void
6322 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6324 int i, j;
6325 rtx x, y;
6326 struct replacement *r;
6327 enum rtx_code code;
6328 const char *fmt;
6330 for (j = 0; j < orig_replacements; j++)
6331 if (replacements[j].where == px)
6333 r = &replacements[n_replacements++];
6334 r->where = py;
6335 r->what = replacements[j].what;
6336 r->mode = replacements[j].mode;
6339 x = *px;
6340 y = *py;
6341 code = GET_CODE (x);
6342 fmt = GET_RTX_FORMAT (code);
6344 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6346 if (fmt[i] == 'e')
6347 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6348 else if (fmt[i] == 'E')
6349 for (j = XVECLEN (x, i); --j >= 0; )
6350 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6351 orig_replacements);
6355 /* Change any replacements being done to *X to be done to *Y. */
6357 void
6358 move_replacements (rtx *x, rtx *y)
6360 int i;
6362 for (i = 0; i < n_replacements; i++)
6363 if (replacements[i].where == x)
6364 replacements[i].where = y;
6367 /* If LOC was scheduled to be replaced by something, return the replacement.
6368 Otherwise, return *LOC. */
6371 find_replacement (rtx *loc)
6373 struct replacement *r;
6375 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6377 rtx reloadreg = rld[r->what].reg_rtx;
6379 if (reloadreg && r->where == loc)
6381 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6382 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6384 return reloadreg;
6386 else if (reloadreg && GET_CODE (*loc) == SUBREG
6387 && r->where == &SUBREG_REG (*loc))
6389 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6390 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6392 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6393 GET_MODE (SUBREG_REG (*loc)),
6394 SUBREG_BYTE (*loc));
6398 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6399 what's inside and make a new rtl if so. */
6400 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6401 || GET_CODE (*loc) == MULT)
6403 rtx x = find_replacement (&XEXP (*loc, 0));
6404 rtx y = find_replacement (&XEXP (*loc, 1));
6406 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6407 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6410 return *loc;
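/* Illustrative sketch (MEM is a hypothetical operand): back ends typically
   call this from reload-time helpers to see the post-reload form of part of
   an operand before emitting code with it.  */
#if 0
  rtx base = find_replacement (&XEXP (mem, 0));
#endif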
6413 /* Return nonzero if register in range [REGNO, ENDREGNO)
6414 appears either explicitly or implicitly in X
6415 other than being stored into (except for earlyclobber operands).
6417 References contained within the substructure at LOC do not count.
6418 LOC may be zero, meaning don't ignore anything.
6420 This is similar to refers_to_regno_p in rtlanal.c except that we
6421 look at equivalences for pseudos that didn't get hard registers. */
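/* Illustrative example (pseudo number and stack slot made up): if pseudo 80
   received no hard register and reg_equiv_memory_loc (80) is
   (mem:SI (plus (reg fp) (const_int -4))), then asking whether the frame
   pointer is referenced in (reg:SI 80) answers yes, because the pseudo will
   be replaced by that frame-pointer-relative MEM.  */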
6423 static int
6424 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6425 rtx x, rtx *loc)
6427 int i;
6428 unsigned int r;
6429 RTX_CODE code;
6430 const char *fmt;
6432 if (x == 0)
6433 return 0;
6435 repeat:
6436 code = GET_CODE (x);
6438 switch (code)
6440 case REG:
6441 r = REGNO (x);
6443 /* If this is a pseudo, a hard register must not have been allocated.
6444 X must therefore either be a constant or be in memory. */
6445 if (r >= FIRST_PSEUDO_REGISTER)
6447 if (reg_equiv_memory_loc (r))
6448 return refers_to_regno_for_reload_p (regno, endregno,
6449 reg_equiv_memory_loc (r),
6450 (rtx*) 0);
6452 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6453 return 0;
6456 return (endregno > r
6457 && regno < r + (r < FIRST_PSEUDO_REGISTER
6458 ? hard_regno_nregs[r][GET_MODE (x)]
6459 : 1));
6461 case SUBREG:
6462 /* If this is a SUBREG of a hard reg, we can see exactly which
6463 registers are being modified. Otherwise, handle normally. */
6464 if (REG_P (SUBREG_REG (x))
6465 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6467 unsigned int inner_regno = subreg_regno (x);
6468 unsigned int inner_endregno
6469 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6470 ? subreg_nregs (x) : 1);
6472 return endregno > inner_regno && regno < inner_endregno;
6474 break;
6476 case CLOBBER:
6477 case SET:
6478 if (&SET_DEST (x) != loc
6479 /* Note setting a SUBREG counts as referring to the REG it is in for
6480 a pseudo but not for hard registers since we can
6481 treat each word individually. */
6482 && ((GET_CODE (SET_DEST (x)) == SUBREG
6483 && loc != &SUBREG_REG (SET_DEST (x))
6484 && REG_P (SUBREG_REG (SET_DEST (x)))
6485 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6486 && refers_to_regno_for_reload_p (regno, endregno,
6487 SUBREG_REG (SET_DEST (x)),
6488 loc))
6489 /* If the output is an earlyclobber operand, this is
6490 a conflict. */
6491 || ((!REG_P (SET_DEST (x))
6492 || earlyclobber_operand_p (SET_DEST (x)))
6493 && refers_to_regno_for_reload_p (regno, endregno,
6494 SET_DEST (x), loc))))
6495 return 1;
6497 if (code == CLOBBER || loc == &SET_SRC (x))
6498 return 0;
6499 x = SET_SRC (x);
6500 goto repeat;
6502 default:
6503 break;
6506 /* X does not match, so try its subexpressions. */
6508 fmt = GET_RTX_FORMAT (code);
6509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6511 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6513 if (i == 0)
6515 x = XEXP (x, 0);
6516 goto repeat;
6518 else
6519 if (refers_to_regno_for_reload_p (regno, endregno,
6520 XEXP (x, i), loc))
6521 return 1;
6523 else if (fmt[i] == 'E')
6525 int j;
6526 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6527 if (loc != &XVECEXP (x, i, j)
6528 && refers_to_regno_for_reload_p (regno, endregno,
6529 XVECEXP (x, i, j), loc))
6530 return 1;
6533 return 0;
6536 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6537 we check if any register number in X conflicts with the relevant register
6538 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6539 contains a MEM (we don't bother checking for memory addresses that can't
6540 conflict because we expect this to be a rare case).
6542 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6543 that we look at equivalences for pseudos that didn't get hard registers. */
6546 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6548 int regno, endregno;
6550 /* Overly conservative. */
6551 if (GET_CODE (x) == STRICT_LOW_PART
6552 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6553 x = XEXP (x, 0);
6555 /* If either argument is a constant, then modifying X can not affect IN. */
6556 if (CONSTANT_P (x) || CONSTANT_P (in))
6557 return 0;
6558 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6559 return refers_to_mem_for_reload_p (in);
6560 else if (GET_CODE (x) == SUBREG)
6562 regno = REGNO (SUBREG_REG (x));
6563 if (regno < FIRST_PSEUDO_REGISTER)
6564 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6565 GET_MODE (SUBREG_REG (x)),
6566 SUBREG_BYTE (x),
6567 GET_MODE (x));
6568 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6569 ? subreg_nregs (x) : 1);
6571 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6573 else if (REG_P (x))
6575 regno = REGNO (x);
6577 /* If this is a pseudo, it must not have been assigned a hard register.
6578 Therefore, it must either be in memory or be a constant. */
6580 if (regno >= FIRST_PSEUDO_REGISTER)
6582 if (reg_equiv_memory_loc (regno))
6583 return refers_to_mem_for_reload_p (in);
6584 gcc_assert (reg_equiv_constant (regno));
6585 return 0;
6588 endregno = END_HARD_REGNO (x);
6590 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6592 else if (MEM_P (x))
6593 return refers_to_mem_for_reload_p (in);
6594 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6595 || GET_CODE (x) == CC0)
6596 return reg_mentioned_p (x, in);
6597 else
6599 gcc_assert (GET_CODE (x) == PLUS);
6601 /* We actually want to know if X is mentioned somewhere inside IN.
6602 We must not say that (plus (sp) (const_int 124)) is in
6603 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6604 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6605 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6606 while (MEM_P (in))
6607 in = XEXP (in, 0);
6608 if (REG_P (in))
6609 return 0;
6610 else if (GET_CODE (in) == PLUS)
6611 return (rtx_equal_p (x, in)
6612 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6613 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6614 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6615 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6618 gcc_unreachable ();
6621 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6622 registers. */
6624 static int
6625 refers_to_mem_for_reload_p (rtx x)
6627 const char *fmt;
6628 int i;
6630 if (MEM_P (x))
6631 return 1;
6633 if (REG_P (x))
6634 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6635 && reg_equiv_memory_loc (REGNO (x)));
6637 fmt = GET_RTX_FORMAT (GET_CODE (x));
6638 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6639 if (fmt[i] == 'e'
6640 && (MEM_P (XEXP (x, i))
6641 || refers_to_mem_for_reload_p (XEXP (x, i))))
6642 return 1;
6644 return 0;
6647 /* Check the insns before INSN to see if there is a suitable register
6648 containing the same value as GOAL.
6649 If OTHER is -1, look for a register in class RCLASS.
6650 Otherwise, just see if register number OTHER shares GOAL's value.
6652 Return an rtx for the register found, or zero if none is found.
6654 If RELOAD_REG_P is (short *)1,
6655 we reject any hard reg that appears in reload_reg_rtx
6656 because such a hard reg is also needed coming into this insn.
6658 If RELOAD_REG_P is any other nonzero value,
6659 it is a vector indexed by hard reg number
6660 and we reject any hard reg whose element in the vector is nonnegative
6661 as well as any that appears in reload_reg_rtx.
6663 If GOAL is zero, then GOALREG is a register number; we look
6664 for an equivalent for that register.
6666 MODE is the machine mode of the value we want an equivalence for.
6667 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6669 This function is used by jump.c as well as in the reload pass.
6671 If GOAL is the sum of the stack pointer and a constant, we treat it
6672 as if it were a constant except that sp is required to be unchanging. */
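/* Illustrative example (hard register number made up): if a preceding insn
   was (set (reg:SI 5) (mem:SI (symbol_ref "x"))) and GOAL is that same
   (mem:SI (symbol_ref "x")), then, provided neither register 5 nor the
   memory location is modified in between, (reg:SI 5) is returned and the
   caller can reuse it instead of loading from memory again.  */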
6675 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6676 short *reload_reg_p, int goalreg, enum machine_mode mode)
6678 rtx p = insn;
6679 rtx goaltry, valtry, value, where;
6680 rtx pat;
6681 int regno = -1;
6682 int valueno;
6683 int goal_mem = 0;
6684 int goal_const = 0;
6685 int goal_mem_addr_varies = 0;
6686 int need_stable_sp = 0;
6687 int nregs;
6688 int valuenregs;
6689 int num = 0;
6691 if (goal == 0)
6692 regno = goalreg;
6693 else if (REG_P (goal))
6694 regno = REGNO (goal);
6695 else if (MEM_P (goal))
6697 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6698 if (MEM_VOLATILE_P (goal))
6699 return 0;
6700 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6701 return 0;
6702 /* An address with side effects must be reexecuted. */
6703 switch (code)
6705 case POST_INC:
6706 case PRE_INC:
6707 case POST_DEC:
6708 case PRE_DEC:
6709 case POST_MODIFY:
6710 case PRE_MODIFY:
6711 return 0;
6712 default:
6713 break;
6715 goal_mem = 1;
6717 else if (CONSTANT_P (goal))
6718 goal_const = 1;
6719 else if (GET_CODE (goal) == PLUS
6720 && XEXP (goal, 0) == stack_pointer_rtx
6721 && CONSTANT_P (XEXP (goal, 1)))
6722 goal_const = need_stable_sp = 1;
6723 else if (GET_CODE (goal) == PLUS
6724 && XEXP (goal, 0) == frame_pointer_rtx
6725 && CONSTANT_P (XEXP (goal, 1)))
6726 goal_const = 1;
6727 else
6728 return 0;
6730 num = 0;
6731 /* Scan insns back from INSN, looking for one that copies
6732 a value into or out of GOAL.
6733 Stop and give up if we reach a label. */
6735 while (1)
6737 p = PREV_INSN (p);
6738 if (p && DEBUG_INSN_P (p))
6739 continue;
6740 num++;
6741 if (p == 0 || LABEL_P (p)
6742 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6743 return 0;
6745 /* Don't reuse register contents from before a setjmp-type
6746 function call; on the second return (from the longjmp) it
6747 might have been clobbered by a later reuse. It doesn't
6748 seem worthwhile to actually go and see if it is actually
6749 reused even if that information would be readily available;
6750 just don't reuse it across the setjmp call. */
6751 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6752 return 0;
6754 if (NONJUMP_INSN_P (p)
6755 /* If we don't want spill regs ... */
6756 && (! (reload_reg_p != 0
6757 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6758 /* ... then ignore insns introduced by reload; they aren't
6759 useful and can cause results in reload_as_needed to be
6760 different from what they were when calculating the need for
6761 spills. If we notice an input-reload insn here, we will
6762 reject it below, but it might hide a usable equivalent.
6763 That makes bad code. It may even fail: perhaps no reg was
6764 spilled for this insn because it was assumed we would find
6765 that equivalent. */
6766 || INSN_UID (p) < reload_first_uid))
6768 rtx tem;
6769 pat = single_set (p);
6771 /* First check for something that sets some reg equal to GOAL. */
6772 if (pat != 0
6773 && ((regno >= 0
6774 && true_regnum (SET_SRC (pat)) == regno
6775 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6776 ||
6777 (regno >= 0
6778 && true_regnum (SET_DEST (pat)) == regno
6779 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6780 ||
6781 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6782 /* When looking for stack pointer + const,
6783 make sure we don't use a stack adjust. */
6784 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6785 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6786 || (goal_mem
6787 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6788 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6789 || (goal_mem
6790 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6791 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6792 /* If we are looking for a constant,
6793 and something equivalent to that constant was copied
6794 into a reg, we can use that reg. */
6795 || (goal_const && REG_NOTES (p) != 0
6796 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6797 && ((rtx_equal_p (XEXP (tem, 0), goal)
6798 && (valueno
6799 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6800 || (REG_P (SET_DEST (pat))
6801 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6802 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6803 && CONST_INT_P (goal)
6804 && 0 != (goaltry
6805 = operand_subword (XEXP (tem, 0), 0, 0,
6806 VOIDmode))
6807 && rtx_equal_p (goal, goaltry)
6808 && (valtry
6809 = operand_subword (SET_DEST (pat), 0, 0,
6810 VOIDmode))
6811 && (valueno = true_regnum (valtry)) >= 0)))
6812 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6813 NULL_RTX))
6814 && REG_P (SET_DEST (pat))
6815 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6816 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6817 && CONST_INT_P (goal)
6818 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6819 VOIDmode))
6820 && rtx_equal_p (goal, goaltry)
6821 && (valtry
6822 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6823 && (valueno = true_regnum (valtry)) >= 0)))
6825 if (other >= 0)
6827 if (valueno != other)
6828 continue;
6830 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6831 continue;
6832 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6833 mode, valueno))
6834 continue;
6835 value = valtry;
6836 where = p;
6837 break;
6842 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6843 (or copying VALUE into GOAL, if GOAL is also a register).
6844 Now verify that VALUE is really valid. */
6846 /* VALUENO is the register number of VALUE; a hard register. */
6848 /* Don't try to re-use something that is killed in this insn. We want
6849 to be able to trust REG_UNUSED notes. */
6850 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6851 return 0;
6853 /* If we propose to get the value from the stack pointer or if GOAL is
6854 a MEM based on the stack pointer, we need a stable SP. */
6855 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6856 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6857 goal)))
6858 need_stable_sp = 1;
6860 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6861 if (GET_MODE (value) != mode)
6862 return 0;
6864 /* Reject VALUE if it was loaded from GOAL
6865 and is also a register that appears in the address of GOAL. */
6867 if (goal_mem && value == SET_DEST (single_set (where))
6868 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6869 goal, (rtx*) 0))
6870 return 0;
6872 /* Reject registers that overlap GOAL. */
6874 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6875 nregs = hard_regno_nregs[regno][mode];
6876 else
6877 nregs = 1;
6878 valuenregs = hard_regno_nregs[valueno][mode];
6880 if (!goal_mem && !goal_const
6881 && regno + nregs > valueno && regno < valueno + valuenregs)
6882 return 0;
6884 /* Reject VALUE if it is one of the regs reserved for reloads.
6885 Reload1 knows how to reuse them anyway, and it would get
6886 confused if we allocated one without its knowledge.
6887 (Now that insns introduced by reload are ignored above,
6888 this case shouldn't happen, but I'm not positive.) */
6890 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6892 int i;
6893 for (i = 0; i < valuenregs; ++i)
6894 if (reload_reg_p[valueno + i] >= 0)
6895 return 0;
6898 /* Reject VALUE if it is a register being used for an input reload
6899 even if it is not one of those reserved. */
6901 if (reload_reg_p != 0)
6903 int i;
6904 for (i = 0; i < n_reloads; i++)
6905 if (rld[i].reg_rtx != 0 && rld[i].in)
6907 int regno1 = REGNO (rld[i].reg_rtx);
6908 int nregs1 = hard_regno_nregs[regno1]
6909 [GET_MODE (rld[i].reg_rtx)];
6910 if (regno1 < valueno + valuenregs
6911 && regno1 + nregs1 > valueno)
6912 return 0;
6916 if (goal_mem)
6917 /* We must treat frame pointer as varying here,
6918 since it can vary--in a nonlocal goto as generated by expand_goto. */
6919 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6921 /* Now verify that the values of GOAL and VALUE remain unaltered
6922 until INSN is reached. */
6924 p = insn;
6925 while (1)
6927 p = PREV_INSN (p);
6928 if (p == where)
6929 return value;
6931 /* Don't trust the conversion past a function call
6932 if either of the two is in a call-clobbered register, or memory. */
6933 if (CALL_P (p))
6935 int i;
6937 if (goal_mem || need_stable_sp)
6938 return 0;
6940 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6941 for (i = 0; i < nregs; ++i)
6942 if (call_used_regs[regno + i]
6943 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6944 return 0;
6946 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6947 for (i = 0; i < valuenregs; ++i)
6948 if (call_used_regs[valueno + i]
6949 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6950 return 0;
6953 if (INSN_P (p))
6955 pat = PATTERN (p);
6957 /* Watch out for unspec_volatile, and volatile asms. */
6958 if (volatile_insn_p (pat))
6959 return 0;
6961 /* If this insn P stores in either GOAL or VALUE, return 0.
6962 If GOAL is a memory ref and this insn writes memory, return 0.
6963 If GOAL is a memory ref and its address is not constant,
6964 and this insn P changes a register used in GOAL, return 0. */
6966 if (GET_CODE (pat) == COND_EXEC)
6967 pat = COND_EXEC_CODE (pat);
6968 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6970 rtx dest = SET_DEST (pat);
6971 while (GET_CODE (dest) == SUBREG
6972 || GET_CODE (dest) == ZERO_EXTRACT
6973 || GET_CODE (dest) == STRICT_LOW_PART)
6974 dest = XEXP (dest, 0);
6975 if (REG_P (dest))
6977 int xregno = REGNO (dest);
6978 int xnregs;
6979 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6980 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6981 else
6982 xnregs = 1;
6983 if (xregno < regno + nregs && xregno + xnregs > regno)
6984 return 0;
6985 if (xregno < valueno + valuenregs
6986 && xregno + xnregs > valueno)
6987 return 0;
6988 if (goal_mem_addr_varies
6989 && reg_overlap_mentioned_for_reload_p (dest, goal))
6990 return 0;
6991 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6992 return 0;
6994 else if (goal_mem && MEM_P (dest)
6995 && ! push_operand (dest, GET_MODE (dest)))
6996 return 0;
6997 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6998 && reg_equiv_memory_loc (regno) != 0)
6999 return 0;
7000 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7001 return 0;
7003 else if (GET_CODE (pat) == PARALLEL)
7005 int i;
7006 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7008 rtx v1 = XVECEXP (pat, 0, i);
7009 if (GET_CODE (v1) == COND_EXEC)
7010 v1 = COND_EXEC_CODE (v1);
7011 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7013 rtx dest = SET_DEST (v1);
7014 while (GET_CODE (dest) == SUBREG
7015 || GET_CODE (dest) == ZERO_EXTRACT
7016 || GET_CODE (dest) == STRICT_LOW_PART)
7017 dest = XEXP (dest, 0);
7018 if (REG_P (dest))
7020 int xregno = REGNO (dest);
7021 int xnregs;
7022 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7023 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7024 else
7025 xnregs = 1;
7026 if (xregno < regno + nregs
7027 && xregno + xnregs > regno)
7028 return 0;
7029 if (xregno < valueno + valuenregs
7030 && xregno + xnregs > valueno)
7031 return 0;
7032 if (goal_mem_addr_varies
7033 && reg_overlap_mentioned_for_reload_p (dest,
7034 goal))
7035 return 0;
7036 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7037 return 0;
7039 else if (goal_mem && MEM_P (dest)
7040 && ! push_operand (dest, GET_MODE (dest)))
7041 return 0;
7042 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7043 && reg_equiv_memory_loc (regno) != 0)
7044 return 0;
7045 else if (need_stable_sp
7046 && push_operand (dest, GET_MODE (dest)))
7047 return 0;
7052 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7054 rtx link;
7056 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7057 link = XEXP (link, 1))
7059 pat = XEXP (link, 0);
7060 if (GET_CODE (pat) == CLOBBER)
7062 rtx dest = SET_DEST (pat);
7064 if (REG_P (dest))
7066 int xregno = REGNO (dest);
7067 int xnregs
7068 = hard_regno_nregs[xregno][GET_MODE (dest)];
7070 if (xregno < regno + nregs
7071 && xregno + xnregs > regno)
7072 return 0;
7073 else if (xregno < valueno + valuenregs
7074 && xregno + xnregs > valueno)
7075 return 0;
7076 else if (goal_mem_addr_varies
7077 && reg_overlap_mentioned_for_reload_p (dest,
7078 goal))
7079 return 0;
7082 else if (goal_mem && MEM_P (dest)
7083 && ! push_operand (dest, GET_MODE (dest)))
7084 return 0;
7085 else if (need_stable_sp
7086 && push_operand (dest, GET_MODE (dest)))
7087 return 0;
7092 #ifdef AUTO_INC_DEC
7093 /* If this insn auto-increments or auto-decrements
7094 either regno or valueno, return 0 now.
7095 If GOAL is a memory ref and its address is not constant,
7096 and this insn P increments a register used in GOAL, return 0. */
7098 rtx link;
7100 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7101 if (REG_NOTE_KIND (link) == REG_INC
7102 && REG_P (XEXP (link, 0)))
7104 int incno = REGNO (XEXP (link, 0));
7105 if (incno < regno + nregs && incno >= regno)
7106 return 0;
7107 if (incno < valueno + valuenregs && incno >= valueno)
7108 return 0;
7109 if (goal_mem_addr_varies
7110 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7111 goal))
7112 return 0;
7115 #endif
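/* A minimal sketch of a typical call, in the spirit of the callers in
   reload1.c (the variable names here are hypothetical): ask whether some
   earlier insn already left GOAL's value in a hard register of class
   RCLASS, so that the reload can reuse it instead of emitting a load.  */
#if 0
static void
sketch_find_equiv_reg_use (rtx goal, rtx insn, enum reg_class rclass,
                           enum machine_mode mode)
{
  /* OTHER == -1: any register of class RCLASS will do.
     RELOAD_REG_P == (short *) 1: refuse hard regs already chosen as
     reload registers for this insn.  */
  rtx equiv = find_equiv_reg (goal, insn, rclass, -1,
                              (short *) (HOST_WIDE_INT) 1, 0, mode);

  if (equiv != 0)
    /* EQUIV names a hard register (possibly through a SUBREG) whose
       contents equal GOAL and survive up to INSN; its mode matches.  */
    gcc_assert (GET_MODE (equiv) == mode);
}
#endif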
7120 /* Find a place where INCED appears in an increment or decrement operator
7121 within X, and return the amount INCED is incremented or decremented by.
7122 The value is always positive. */
7124 static int
7125 find_inc_amount (rtx x, rtx inced)
7127 enum rtx_code code = GET_CODE (x);
7128 const char *fmt;
7129 int i;
7131 if (code == MEM)
7133 rtx addr = XEXP (x, 0);
7134 if ((GET_CODE (addr) == PRE_DEC
7135 || GET_CODE (addr) == POST_DEC
7136 || GET_CODE (addr) == PRE_INC
7137 || GET_CODE (addr) == POST_INC)
7138 && XEXP (addr, 0) == inced)
7139 return GET_MODE_SIZE (GET_MODE (x));
7140 else if ((GET_CODE (addr) == PRE_MODIFY
7141 || GET_CODE (addr) == POST_MODIFY)
7142 && GET_CODE (XEXP (addr, 1)) == PLUS
7143 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7144 && XEXP (addr, 0) == inced
7145 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7147 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7148 return i < 0 ? -i : i;
7152 fmt = GET_RTX_FORMAT (code);
7153 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7155 if (fmt[i] == 'e')
7157 int tem = find_inc_amount (XEXP (x, i), inced);
7158 if (tem != 0)
7159 return tem;
7161 if (fmt[i] == 'E')
7163 int j;
7164 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7166 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7167 if (tem != 0)
7168 return tem;
7173 return 0;
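/* A minimal sketch of the expected results: for (mem:SI (pre_inc REG))
   the amount is GET_MODE_SIZE (SImode); for a PRE_MODIFY whose addend is
   (const_int -8) it is 8, since the result is always made positive.  The
   helper name is hypothetical.  */
#if 0
static void
sketch_find_inc_amount (rtx reg)
{
  rtx autoinc_mem, modify_mem;

  autoinc_mem = gen_rtx_MEM (SImode, gen_rtx_PRE_INC (Pmode, reg));
  gcc_assert (find_inc_amount (autoinc_mem, reg)
              == (int) GET_MODE_SIZE (SImode));

  modify_mem
    = gen_rtx_MEM (SImode,
                   gen_rtx_PRE_MODIFY (Pmode, reg,
                                       gen_rtx_PLUS (Pmode, reg,
                                                     GEN_INT (-8))));
  gcc_assert (find_inc_amount (modify_mem, reg) == 8);
}
#endif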
7176 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7177 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7179 #ifdef AUTO_INC_DEC
7180 static int
7181 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7182 rtx insn)
7184 rtx link;
7186 gcc_assert (insn);
7188 if (! INSN_P (insn))
7189 return 0;
7191 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7192 if (REG_NOTE_KIND (link) == REG_INC)
7194 unsigned int test = (int) REGNO (XEXP (link, 0));
7195 if (test >= regno && test < endregno)
7196 return 1;
7198 return 0;
7200 #else
7202 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7204 #endif
7206 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7207 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7208 REG_INC. REGNO must refer to a hard register. */
7210 int
7211 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7212 int sets)
7214 unsigned int nregs, endregno;
7216 /* regno must be a hard register. */
7217 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7219 nregs = hard_regno_nregs[regno][mode];
7220 endregno = regno + nregs;
7222 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7223 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7224 && REG_P (XEXP (PATTERN (insn), 0)))
7226 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7228 return test >= regno && test < endregno;
7231 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7232 return 1;
7234 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7236 int i = XVECLEN (PATTERN (insn), 0) - 1;
7238 for (; i >= 0; i--)
7240 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7241 if ((GET_CODE (elt) == CLOBBER
7242 || (sets == 1 && GET_CODE (elt) == SET))
7243 && REG_P (XEXP (elt, 0)))
7245 unsigned int test = REGNO (XEXP (elt, 0));
7247 if (test >= regno && test < endregno)
7248 return 1;
7250 if (sets == 2
7251 && reg_inc_found_and_valid_p (regno, endregno, elt))
7252 return 1;
7256 return 0;
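/* A minimal sketch of the SETS parameter (the variable names are
   hypothetical): 0 looks only at CLOBBERs, 1 also treats plain SETs as
   clobbers, and 2 additionally honours REG_INC notes, so auto-increments
   count too.  */
#if 0
static void
sketch_regno_clobbered_p (unsigned int regno, rtx insn,
                          enum machine_mode mode)
{
  if (regno_clobbered_p (regno, insn, mode, 2))
    {
      /* Some part of hard register REGNO is overwritten or
         auto-incremented by INSN, so its old contents cannot be
         relied upon after the insn.  */
    }
}
#endif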
7259 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7260 rtx
7261 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7263 int regno;
7265 if (GET_MODE (reloadreg) == mode)
7266 return reloadreg;
7268 regno = REGNO (reloadreg);
7270 if (REG_WORDS_BIG_ENDIAN)
7271 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7272 - (int) hard_regno_nregs[regno][mode];
7274 return gen_rtx_REG (mode, regno);
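/* A minimal sketch, assuming a DImode reload register that spans two
   word-sized hard registers: narrowing it to SImode keeps the same
   REGNO on little-endian-word targets, but moves to REGNO + 1 when
   REG_WORDS_BIG_ENDIAN, because the low-order word then lives in the
   higher-numbered register.  The helper name is hypothetical.  */
#if 0
static void
sketch_reload_adjust_reg_for_mode (rtx dimode_reload_reg)
{
  rtx low_part = reload_adjust_reg_for_mode (dimode_reload_reg, SImode);

  if (REG_WORDS_BIG_ENDIAN)
    gcc_assert (REGNO (low_part) == REGNO (dimode_reload_reg) + 1);
  else
    gcc_assert (REGNO (low_part) == REGNO (dimode_reload_reg));
}
#endif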
7277 static const char *const reload_when_needed_name[] =
7279 "RELOAD_FOR_INPUT",
7280 "RELOAD_FOR_OUTPUT",
7281 "RELOAD_FOR_INSN",
7282 "RELOAD_FOR_INPUT_ADDRESS",
7283 "RELOAD_FOR_INPADDR_ADDRESS",
7284 "RELOAD_FOR_OUTPUT_ADDRESS",
7285 "RELOAD_FOR_OUTADDR_ADDRESS",
7286 "RELOAD_FOR_OPERAND_ADDRESS",
7287 "RELOAD_FOR_OPADDR_ADDR",
7288 "RELOAD_OTHER",
7289 "RELOAD_FOR_OTHER_ADDRESS"
7292 /* These functions are used to print the variables set by 'find_reloads' */
7294 DEBUG_FUNCTION void
7295 debug_reload_to_stream (FILE *f)
7297 int r;
7298 const char *prefix;
7300 if (! f)
7301 f = stderr;
7302 for (r = 0; r < n_reloads; r++)
7304 fprintf (f, "Reload %d: ", r);
7306 if (rld[r].in != 0)
7308 fprintf (f, "reload_in (%s) = ",
7309 GET_MODE_NAME (rld[r].inmode));
7310 print_inline_rtx (f, rld[r].in, 24);
7311 fprintf (f, "\n\t");
7314 if (rld[r].out != 0)
7316 fprintf (f, "reload_out (%s) = ",
7317 GET_MODE_NAME (rld[r].outmode));
7318 print_inline_rtx (f, rld[r].out, 24);
7319 fprintf (f, "\n\t");
7322 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7324 fprintf (f, "%s (opnum = %d)",
7325 reload_when_needed_name[(int) rld[r].when_needed],
7326 rld[r].opnum);
7328 if (rld[r].optional)
7329 fprintf (f, ", optional");
7331 if (rld[r].nongroup)
7332 fprintf (f, ", nongroup");
7334 if (rld[r].inc != 0)
7335 fprintf (f, ", inc by %d", rld[r].inc);
7337 if (rld[r].nocombine)
7338 fprintf (f, ", can't combine");
7340 if (rld[r].secondary_p)
7341 fprintf (f, ", secondary_reload_p");
7343 if (rld[r].in_reg != 0)
7345 fprintf (f, "\n\treload_in_reg: ");
7346 print_inline_rtx (f, rld[r].in_reg, 24);
7349 if (rld[r].out_reg != 0)
7351 fprintf (f, "\n\treload_out_reg: ");
7352 print_inline_rtx (f, rld[r].out_reg, 24);
7355 if (rld[r].reg_rtx != 0)
7357 fprintf (f, "\n\treload_reg_rtx: ");
7358 print_inline_rtx (f, rld[r].reg_rtx, 24);
7361 prefix = "\n\t";
7362 if (rld[r].secondary_in_reload != -1)
7364 fprintf (f, "%ssecondary_in_reload = %d",
7365 prefix, rld[r].secondary_in_reload);
7366 prefix = ", ";
7369 if (rld[r].secondary_out_reload != -1)
7370 fprintf (f, "%ssecondary_out_reload = %d\n",
7371 prefix, rld[r].secondary_out_reload);
7373 prefix = "\n\t";
7374 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7376 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7377 insn_data[rld[r].secondary_in_icode].name);
7378 prefix = ", ";
7381 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7382 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7383 insn_data[rld[r].secondary_out_icode].name);
7385 fprintf (f, "\n");
7389 DEBUG_FUNCTION void
7390 debug_reload (void)
7392 debug_reload_to_stream (stderr);
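/* Usage note (illustrative, assuming a debugging session in which
   find_reloads has already filled in rld[] and n_reloads for the insn
   of interest): both entry points are meant to be called by hand from
   the debugger, e.g. from gdb:

     (gdb) call debug_reload ()
     (gdb) call debug_reload_to_stream (dump_file)

   Each reload is printed with its class, its when_needed kind, its
   operand number and any secondary reloads.  */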